diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000..cef4968b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,39 @@ +--- +name: šŸ› Bug Report +about: Create a report to help us improve the Klavis MCP Server. +title: "[BUG] " +labels: 'bug' +assignees: '' + +--- + + + +**1. Describe the bug** +A clear and concise description of what the bug is. + +**2. To Reproduce** +Please provide detailed steps to reproduce the behavior. A minimal reproducible example is highly appreciated. This could be a small code snippet, a repository, or a sequence of API calls. + +**3. Expected behavior** +A clear and concise description of what you expected to happen. + +**4. Environment (please complete the following information):** + +* **Affected MCP Server Name:** +* **MCP Server Version/Commit:** +* **AI Platform:** +* **Klavis Service:** + - [ ] Using Klavis Cloud (https://www.klavis.ai/) + - [ ] Self-hosted + +**5. Self-hosting Details (if applicable):** + +* **Deployment Method:** +* **Operating System:** +* **Relevant Logs:** + +**6. Additional context** +Add any other context about the problem here, such as screenshots, error messages, or anything else that might be helpful. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 00000000..7eb81901 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: ✨ Feature Request +about: Suggest an idea or improvement for the Klavis MCP Server. +title: "[FEAT] " +labels: 'enhancement' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** + + +**Describe the solution you'd like** + + +**Describe alternatives you've considered** + + +**Additional context** + diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md new file mode 100644 index 00000000..7d735405 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/question.md @@ -0,0 +1,11 @@ +--- +name: šŸ’¬ Question or Discussion +about: Ask a question or start a discussion about the Klavis MCP Server. +title: "[QUESTION] " +labels: 'question' +assignees: '' + +--- + +**What would you like to ask or discuss?** + diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 00000000..f580890c --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,27 @@ +## Description + + +## Related issue + + + +## Type of change + +- [ ] Bug fix (non-breaking change which fixes an issue) +- [ ] New MCP feature (non-breaking change which adds functionality) +- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) +- [ ] Documentation update +- [ ] Other (please specify) + +## How has this been tested? +(Add screenshots or recordings here if applicable.) + + +## Checklist + +- [ ] I have performed a self-review of my own code +- [ ] I have commented my code, particularly in hard-to-understand areas +- [ ] I have made corresponding changes to the documentation +- [ ] My changes generate no new warnings +- [ ] I have added tests that prove my fix is effective or that my feature works +- [ ] New and existing tests pass locally with my changes \ No newline at end of file diff --git a/.github/workflows/mcp-servers-build.yml b/.github/workflows/mcp-servers-build.yml new file mode 100644 index 00000000..a4516b4e --- /dev/null +++ b/.github/workflows/mcp-servers-build.yml @@ -0,0 +1,227 @@ +name: Build and Publish MCP Servers + +on: + push: + branches: + - main + paths: + - 'mcp_servers/**' + - '_oauth_support/**' + workflow_dispatch: + inputs: + servers: + description: 'Comma-separated list of MCP servers to build (e.g., mem0,openai,github)' + required: true + type: string + +env: + REGISTRY: ghcr.io + IMAGE_PREFIX: ${{ github.repository_owner }} + BUILD_ARCHS: amd64 + +jobs: + detect-changes: + name: Detect Changed MCP Servers + runs-on: ubuntu-latest + outputs: + changed-servers: ${{ steps.changes.outputs.changed-servers }} + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Detect changed MCP servers + id: changes + run: | + if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then + # Manual trigger - use input servers + CHANGED_SERVERS="${{ github.event.inputs.servers }}" + echo "Manual build requested for: $CHANGED_SERVERS" + else + # Auto trigger - detect changed files + CHANGED_FILES=$(git diff --name-only HEAD~1 HEAD) + + echo "Changed files:" + echo "$CHANGED_FILES" + + # Check if _oauth_support changed - if so, build all OAuth-supported servers + OAUTH_CHANGED=false + for file in $CHANGED_FILES; do + if [[ $file == _oauth_support/* ]]; then + OAUTH_CHANGED=true + break + fi + done + + # Extract unique MCP server directories that have changes + CHANGED_SERVERS="" + if [ "$OAUTH_CHANGED" = "true" ]; then + # _oauth_support changed - build all OAuth-supported servers + echo "_oauth_support changed, building all OAuth-supported servers" + OAUTH_SERVERS=$(jq -r 'keys[]' _oauth_support/server_name.json) + for server in $OAUTH_SERVERS; do + if [ -f "mcp_servers/${server}/Dockerfile" ]; then + CHANGED_SERVERS="$CHANGED_SERVERS$server," + fi + done + else + # Normal change detection for individual servers + for file in $CHANGED_FILES; do + if [[ $file == mcp_servers/* ]]; then + # Extract server folder path (mcp_servers/server_name/) + server_path=$(echo "$file" | cut -d'/' -f1-2) + server_name=$(echo "$file" | cut -d'/' -f2) + + # Check if Dockerfile exists in the server path + if [ -f "${server_path}/Dockerfile" ]; then + # Add to list if not already present + if [[ "$CHANGED_SERVERS" != *"$server_name"* ]]; then + CHANGED_SERVERS="$CHANGED_SERVERS$server_name," + fi + fi + fi + done + fi + + # Remove trailing comma + CHANGED_SERVERS=${CHANGED_SERVERS%,} + fi + + echo "Changed servers: $CHANGED_SERVERS" + + if [ -n "$CHANGED_SERVERS" ]; then + # Convert comma-separated to JSON array + SERVERS_JSON=$(echo "[$CHANGED_SERVERS]" | sed 's/,/","/g' | sed 's/\[/["/' | sed 's/\]/"]/') + else + SERVERS_JSON="[]" + fi + + echo "changed-servers=$SERVERS_JSON" >> $GITHUB_OUTPUT + echo "Servers to build: $SERVERS_JSON" + + build-and-publish: + name: Build and Publish + runs-on: ubuntu-latest + needs: detect-changes + strategy: + fail-fast: false + matrix: + server: ${{ fromJson(needs.detect-changes.outputs.changed-servers) }} + permissions: + contents: read + packages: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Get image URL + id: get-image-url + run: | + # Check if custom mapping exists, otherwise use original name + if [ -f ".github/workflows/server-name-mapping.json" ]; then + MAPPED_NAME=$(jq -r --arg name "${{ matrix.server }}" '.[$name] // empty' .github/workflows/server-name-mapping.json) + fi + + if [ -z "$MAPPED_NAME" ]; then + # Default: use original server name + SERVER_NAME="${{ matrix.server }}" + else + # Use custom mapping + SERVER_NAME="$MAPPED_NAME" + fi + + echo "Using server name: $SERVER_NAME for ${{ matrix.server }}" + echo "BASE_IMAGE=$(echo "${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}/${SERVER_NAME}-mcp-server" | tr '[:upper:]' '[:lower:]')" >> $GITHUB_OUTPUT + + - name: Build and tag basic image + id: build-base + uses: redhat-actions/buildah-build@v2 + with: + image: ${{ steps.get-image-url.outputs.BASE_IMAGE }} + tags: | + ${{ github.sha }} + latest + context: . + containerfiles: | + mcp_servers/${{ matrix.server }}/Dockerfile + archs: ${{ env.BUILD_ARCHS }} + labels: | + org.opencontainers.image.source=https://github.com/${{ github.repository }} + + - name: Push basic image to registry + uses: redhat-actions/push-to-registry@v2 + with: + image: ${{ steps.get-image-url.outputs.BASE_IMAGE }} + tags: | + ${{ github.sha }} + latest + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Check if server needs OAuth and get command + id: get-command + run: | + # Check if this server needs OAuth support + OAUTH_SERVER_NAME=$(jq -r --arg name "${{ matrix.server }}" '.[$name] // empty' _oauth_support/server_name.json) + + if [ -z "$OAUTH_SERVER_NAME" ]; then + echo "Server ${{ matrix.server }} does not need OAuth support, skipping..." + echo "oauth-server-name=" >> $GITHUB_OUTPUT + exit 0 + fi + + echo "Server ${{ matrix.server }} needs OAuth support, OAuth name: $OAUTH_SERVER_NAME" + echo "oauth-server-name=$OAUTH_SERVER_NAME" >> $GITHUB_OUTPUT + + # Get original entrypoint and cmd + BASE_IMAGE="${{ steps.get-image-url.outputs.BASE_IMAGE }}" + ORIGINAL_ENTRYPOINT=$(podman inspect --format '{{if .Config.Entrypoint}}{{json .Config.Entrypoint}}{{else}}[]{{end}}' "$BASE_IMAGE" 2>/dev/null || echo '[]') + ORIGINAL_CMD=$(podman inspect --format '{{if .Config.Cmd}}{{json .Config.Cmd}}{{else}}[]{{end}}' "$BASE_IMAGE" 2>/dev/null || echo '[]') + + # Combine them into final command + FINAL_COMMAND=$(echo -e "$ORIGINAL_ENTRYPOINT\n$ORIGINAL_CMD" | jq -sc 'add') + + echo "Original entrypoint: $ORIGINAL_ENTRYPOINT" + echo "Original cmd: $ORIGINAL_CMD" + echo "Final command: $FINAL_COMMAND" + + # Set outputs for next step + echo "final-command=$FINAL_COMMAND" >> $GITHUB_OUTPUT + + - name: Prepare OAuth Dockerfile + if: steps.get-command.outputs.oauth-server-name != '' + run: | + sed -i 's|\${MCP_SERVER_NAME}|${{ steps.get-command.outputs.oauth-server-name }}|g' _oauth_support/docker/Dockerfile.template + sed -i 's|\${ENTRYPOINT_COMMAND}|${{ steps.get-command.outputs.final-command }}|g' _oauth_support/docker/Dockerfile.template + + - name: Build OAuth wrapper image + id: build + if: steps.get-command.outputs.oauth-server-name != '' + uses: redhat-actions/buildah-build@v2 + with: + image: ${{ steps.get-image-url.outputs.BASE_IMAGE }} + tags: | + ${{ github.sha }}-oauth + latest + context: _oauth_support + containerfiles: | + _oauth_support/docker/Dockerfile.template + archs: ${{ env.BUILD_ARCHS }} + build-args: | + BASE_IMAGE=${{ steps.get-image-url.outputs.BASE_IMAGE }}:${{ github.sha }} + labels: | + org.opencontainers.image.source=https://github.com/${{ github.repository }} + + - name: Push to registry + if: steps.get-command.outputs.oauth-server-name != '' + uses: redhat-actions/push-to-registry@v2 + with: + image: ${{ steps.get-image-url.outputs.BASE_IMAGE }} + tags: | + ${{ github.sha }}-oauth + latest + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/python-sdk-release.yml b/.github/workflows/python-sdk-release.yml new file mode 100644 index 00000000..79f86d28 --- /dev/null +++ b/.github/workflows/python-sdk-release.yml @@ -0,0 +1,40 @@ +name: Publish Python SDK + +on: + workflow_dispatch: + inputs: + version: + description: "The version of the Python SDK that you would like to release" + required: true + type: string + +jobs: + release: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install Fern CLI + run: npm install -g fern-api + + - name: Release Python SDK + env: + FERN_TOKEN: ${{ secrets.FERN_TOKEN }} + PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} + run: | + fern generate --group python-sdk --version ${{ inputs.version }} --log-level debug + + - name: Create GitHub Release + uses: softprops/action-gh-release@v1 + with: + tag_name: python-v${{ inputs.version }} + name: Python SDK v${{ inputs.version }} + # The body will be auto-generated based on commits since the last tag + generate_release_notes: true + # This will create the tag if it doesn't exist + make_latest: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/server-name-mapping.json b/.github/workflows/server-name-mapping.json new file mode 100644 index 00000000..6244b636 --- /dev/null +++ b/.github/workflows/server-name-mapping.json @@ -0,0 +1,13 @@ +{ + "brave_search": "brave-search", + "cal_com": "calcom", + "firecrawl_deep_research": "firecrawl-deep-research", + "google_calendar": "google-calendar", + "google_docs": "google-docs", + "google_drive": "google-drive", + "google_jobs": "google-jobs", + "google_sheets": "google-sheets", + "google_slides": "google-slides", + "hacker_news": "hacker-news", + "report_generation": "report-generation" +} diff --git a/.github/workflows/sync-openapi.yml b/.github/workflows/sync-openapi.yml new file mode 100644 index 00000000..9094f762 --- /dev/null +++ b/.github/workflows/sync-openapi.yml @@ -0,0 +1,24 @@ +name: Sync OpenAPI Specs # can be customized +on: # additional custom triggers can be configured, examples below + workflow_dispatch: # manual dispatch + push: + branches: + - main # on push to main + schedule: + - cron: '0 3 * * *' # everyday at 3:00 AM UTC + +jobs: + update-from-source: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + token: ${{ secrets.OPENAPI_SYNC_TOKEN }} + - name: Update API with Fern + uses: fern-api/sync-openapi@v2 + with: + update_from_source: true + token: ${{ secrets.OPENAPI_SYNC_TOKEN }} + branch: 'update-api' + auto_merge: false + add_timestamp: true diff --git a/.github/workflows/typescript-sdk-release.yml b/.github/workflows/typescript-sdk-release.yml new file mode 100644 index 00000000..910c19c6 --- /dev/null +++ b/.github/workflows/typescript-sdk-release.yml @@ -0,0 +1,40 @@ +name: Publish TypeScript SDK + +on: + workflow_dispatch: + inputs: + version: + description: "The version of the TypeScript SDK that you would like to release" + required: true + type: string + +jobs: + release: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install Fern CLI + run: npm install -g fern-api + + - name: Release TypeScript SDK + env: + FERN_TOKEN: ${{ secrets.FERN_TOKEN }} + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + run: | + fern generate --group ts-sdk --version ${{ inputs.version }} --log-level debug + + - name: Create GitHub Release + uses: softprops/action-gh-release@v1 + with: + tag_name: ts-v${{ inputs.version }} + name: TypeScript SDK v${{ inputs.version }} + # The body will be auto-generated based on commits since the last tag + generate_release_notes: true + # This will create the tag if it doesn't exist + make_latest: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..62b73f8b --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,152 @@ +# Contributing to Klavis AI + +Thank you for your interest in contributing to Klavis AI! This document outlines the process and conventions we use for contributions, pull requests, and commit messages. + +## Contributor License Agreement (CLA) + +Before we can accept your contributions, we require all contributors to sign our Contributor License Agreement (CLA). This is a one-time process. + +[Contributor License Agreement](https://cla-assistant.io/Klavis-AI/klavis) + +The CLA helps ensure that everyone who submits a contribution has the legal right to do so and agrees to the terms under which the contribution is accepted. Without this agreement, we cannot review or accept your contributions. + +## Table of Contents + +- [Contributor License Agreement (CLA)](#contributor-license-agreement-cla) +- [Code of Conduct](#code-of-conduct) +- [Getting Started](#getting-started) +- [Development Workflow](#development-workflow) +- [Commit Message Convention](#commit-message-convention) +- [Pull Request Process](#pull-request-process) +- [Code Style Guidelines](#code-style-guidelines) +- [Testing Guidelines](#testing-guidelines) +- [License](#license) + +## Code of Conduct + +We expect all contributors to follow our Code of Conduct. Please be respectful and inclusive in all interactions. + +## Getting Started + +1. Fork the repository +2. Clone your fork: `git clone https://github.com/yourusername/klavisAi.git` +3. Add the upstream remote: `git remote add upstream https://github.com/Klavis-AI/klavis.git` +4. Create a new branch for your feature or bug fix +5. Make your changes +6. Submit a pull request + +## Development Workflow + +### Monorepo Structure + +The project is organized as a monorepo with multiple MCP servers and clients: + +- `mcp_servers/` - Contains individual Model Context Protocol servers +- `mcp_clients/` - Contains clients that connect to MCP servers + +When making changes, be mindful of the scope of your change and any potential impacts on other components. + +### Building MCP Servers + +If you're interested in contributing a new MCP server to the project, we have a comprehensive guide to help you get started: + +**[šŸ“– MCP Server Guide](MCP_SERVER_GUIDE.md)** - A complete guide covering: +- What MCP servers are and how they work +- How to design effective tools for AI agents +- Development best practices and testing requirements +- Step-by-step instructions with examples + +This guide is essential reading for anyone wanting to build high-quality MCP servers that integrate well with AI applications. + +## Commit Message Convention + +We follow a simplified version of the [Conventional Commits](https://www.conventionalcommits.org/) specification for commit messages. + +Format: + +``` +(): +``` + +### Type + +Must be one of the following: + +- `feat`: A new feature +- `fix`: A bug fix +- `docs`: Documentation only changes +- `style`: Changes that do not affect the meaning of the code (formatting, etc.) +- `refactor`: A code change that neither fixes a bug nor adds a feature +- `perf`: A code change that improves performance +- `test`: Adding missing or correcting existing tests +- `chore`: Changes to the build process or auxiliary tools +- `ci`: Changes to CI configuration files and scripts + +### Scope + +The scope is optional and can be anything specifying the place of the commit change. For example: `slack`, `github`, `supabase`, etc. + +### Subject + +The subject contains a succinct description of the change: +- Use the imperative, present tense: "change" not "changed" nor "changes" +- Don't capitalize the first letter +- No period (.) at the end + +### Examples + +``` +feat(slack): add user profile lookup functionality +``` + +``` +fix(github): resolve PR comment retrieval issue +``` + +``` +docs(core): update installation instructions +``` + +``` +refactor(supabase): simplify authentication flow +``` + +## Pull Request Process + +1. **Create a descriptive PR title** following the commit message format: + ``` + (): + ``` + +2. **Fill in the PR template** with: + - A clear description of the changes + - The motivation for the changes + - Any additional context that might be helpful + - Screenshots if applicable + +3. **Link related issues** using GitHub keywords like "Closes #123" or "Fixes #456" + +4. **Keep PRs focused and reasonably sized** - Split large changes into smaller, more manageable PRs when possible + +5. **Update documentation** if your changes affect the public API or user experience + +6. **Add tests** for new features or bug fixes + +7. **Make sure all tests pass** and code style checks complete successfully + +8. **Request reviews** from appropriate team members + +9. **Address review feedback** promptly and thoroughly + +10. **Rebase your branch** on the latest main branch before merging + + +## Testing Guidelines + +- Write tests for all new features and bug fixes +- Maintain or improve code coverage +- Test across multiple Node.js/Python/go versions when appropriate + +## License + +By contributing to Klavis AI, you agree that your contributions will be licensed under the project's [Apache 2.0 license](LICENSE). \ No newline at end of file diff --git a/LICENSE b/LICENSE index ff4f0d0b..261eeb9e 100644 --- a/LICENSE +++ b/LICENSE @@ -1,21 +1,201 @@ -MIT License - -Copyright (c) 2025 Klavis AI - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/LLM.md b/LLM.md new file mode 100644 index 00000000..5ddfc466 --- /dev/null +++ b/LLM.md @@ -0,0 +1,272 @@ +# Klavis AI - LLM API Documentation + +## Overview + +Klavis AI is an open-source MCP (Model Context Protocol) integration platform that that let AI agents use any tools reliably at any scale. + +**Key Features:** +- Instant Integration with Python/TypeScript SDKs or REST API +- Built-in OAuth flows and API key management +- 100+ tools across CRM, GSuite, dev tools, sales, search, etc. +- Multi-platform LLM provider support (OpenAI, Anthropic, Gemini, etc.) +- Strata: One MCP server for AI agents to use tools progressively at any scale + +## Installation + +## REST API + +**Base URL:** `https://api.klavis.ai` +**Authentication:** Bearer token (API key in Authorization header) + +### Create Strata Server +```bash +curl -X POST https://api.klavis.ai/mcp-server/strata/create \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "userId": "user_123", + "servers": ["GitHub", "Linear"] + }' +``` + +**Response:** +```json +{ + "strataServerUrl": "https://strata.klavis.ai/mcp/?strata_id=...", + "strataId": "strata_abc123", + "addedServers": ["GitHub", "Linear"], + "oauthUrls": { + "GitHub": "https://api.klavis.ai/oauth/github/authorize?instance_id=...", + "Linear": "https://api.klavis.ai/oauth/linear/authorize?instance_id=..." + } +} +``` + +### Add Servers to Strata +```bash +curl -X POST https://api.klavis.ai/mcp-server/strata/add \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "strata_abc123", + "servers": ["Slack", "Notion"] + }' +``` + +### Get Available Servers +```bash +curl -X GET https://api.klavis.ai/mcp-server/servers \ + -H "Authorization: Bearer YOUR_API_KEY" +``` + + +### Python SDK +```bash +pip install klavis +``` + +### TypeScript/JavaScript SDK +```bash +npm install klavis +``` + +### API Key Setup +Sign up at [klavis.ai](https://www.klavis.ai) and create your [API key](https://www.klavis.ai/home/api-keys). + +## Quick Start + +### Python SDK + +```python +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat + +# Initialize client +klavis_client = Klavis(api_key="your_api_key") + +# Create Strata MCP server (aggregates multiple MCP servers) +response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GITHUB, McpServerName.LINEAR], + user_id="user_123" +) + +# Handle OAuth if needed +if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + print(f"Authorize {server_name}: {oauth_url}") + +``` + +### TypeScript SDK + +```typescript +import { Klavis } from 'klavis'; + +const klavisClient = new Klavis({ apiKey: 'your_api_key' }); + +// Create Strata MCP server +const response = await klavisClient.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Github, Klavis.McpServerName.Linear], + userId: "user_123" +}); +``` + +## Use Case - OpenAI Integration + +```python +import json +from openai import OpenAI +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat + +openai_client = OpenAI(api_key="openai_key") +klavis_client = Klavis(api_key="klavis_key") + +# Create Strata server with multiple services +response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GMAIL, McpServerName.SLACK], + user_id="user_123" +) + +# Handle OAuth authorization for each service +if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + print(f"Please open this URL to complete {server_name} OAuth authorization: {oauth_url}") + +def openai_with_mcp_server(mcp_server_url: str, user_query: str): + messages = [ + {"role": "system", "content": "You are a helpful assistant. Use the available tools to answer the user's question."}, + {"role": "user", "content": user_query} + ] + + tools_info = klavis_client.mcp_server.list_tools( + server_url=mcp_server_url, + format=ToolFormat.OPENAI + ) + + max_iterations = 20 + iteration = 0 + + while iteration < max_iterations: + iteration += 1 + + response = openai_client.chat.completions.create( + model="gpt-4o-mini", + messages=messages, + tools=tools_info.tools, + tool_choice="auto", + ) + + assistant_message = response.choices[0].message + + if assistant_message.tool_calls: + messages.append({ + "role": "assistant", + "content": assistant_message.content, + "tool_calls": [ + { + "id": tc.id, + "type": "function", + "function": { + "name": tc.function.name, + "arguments": tc.function.arguments + } + } + for tc in assistant_message.tool_calls + ] + }) + + for tool_call in assistant_message.tool_calls: + tool_name = tool_call.function.name + tool_args = json.loads(tool_call.function.arguments) + + print(f"Calling: {tool_name}") + print(f"Arguments: {json.dumps(tool_args, indent=2)}") + + function_result = klavis_client.mcp_server.call_tools( + server_url=mcp_server_url, + tool_name=tool_name, + tool_args=tool_args + ) + + messages.append({ + "role": "tool", + "tool_call_id": tool_call.id, + "content": str(function_result) + }) + continue + else: + messages.append({"role": "assistant", "content": assistant_message.content}) + return assistant_message.content + + return "Max iterations reached without final response" + +# Run the integration +result = openai_with_mcp_server( + mcp_server_url=response.strata_server_url, + user_query="Check my latest 5 emails and summarize them in a Slack message to #general" +) + +print(f"\nšŸ¤– Final Response: {result}") +``` + +## Strata Server Management + +### Add +Add servers to an existing Strata MCP server. +```python +klavis_client.mcp_server.add_servers_to_strata( + strata_id="strata_abc123", + servers=[McpServerName.SLACK, McpServerName.NOTION] +) +``` + +### Remove +Delete servers from an existing Strata MCP server. +```python +klavis_client.mcp_server.delete_servers_from_strata( + strata_id="strata_abc123", + servers=["Slack"] +) +``` + +### Get Strata Info +```python +info = klavis_client.mcp_server.get_strata_instance( + strata_id="strata_abc123" +) +print(info.connected_servers) # List of connected servers +print(info.oauth_urls) # OAuth URLs for authentication +``` + +## Tool Formats + +Klavis supports multiple tool formats for different LLM providers: + +- **`openai`** - OpenAI function calling format +- **`anthropic`** - Anthropic Claude tool format +- **`gemini`** - Google Gemini function declarations +- **`mcp_native`** - Standard MCP protocol format + + +## Environment Variables + +```bash +KLAVIS_API_KEY=your_klavis_api_key +``` + +## Best Practices + +1. **Use Strata Servers**: Aggregate ond or multiple MCP servers into one endpoint for easier management +2. **Handle OAuth Gracefully**: Direct users to OAuth URLs and wait for completion +3. **Reuse Connections**: Create Strata servers once per user and reuse them +5. **Secure API Keys**: Use environment variables and never commit keys to version control + +## Support & Resources + +- **Documentation**: [www.klavis.ai/docs](https://www.klavis.ai/docs) +- **OpenAPI** [klavis](https://api.klavis.ai/openapi.json) +- **Website**: [klavis.ai](https://www.klavis.ai) +- **PyPI**: [klavis](https://pypi.org/project/klavis/) +- **npm**: [klavis](https://www.npmjs.com/package/klavis) + diff --git a/MCP_SERVER_GUIDE.md b/MCP_SERVER_GUIDE.md new file mode 100644 index 00000000..430f1977 --- /dev/null +++ b/MCP_SERVER_GUIDE.md @@ -0,0 +1,72 @@ +# Contributor's Guide to Building MCP Servers + +Welcome, contributor! This guide provides a concise overview for building new Model Context Protocol (MCP) servers and contributing them to the Klavis AI open-source repository. It's designed for software engineers who are new to AI agents and MCP. + +## What is an MCP Server? + +An MCP server is a specialized backend that acts as a bridge between a Large Language Model (LLM) and external systems. It exposes a set of "tools" that the LLM can use to interact with the world, such as reading files, searching databases, or calling APIs. + +The Model Context Protocol (MCP) is an open standard that unifies how AI models communicate with these tools. This avoids the need for custom, one-off integrations. The model works via a simple client-server architecture: an MCP client (like an AI assistant in your editor) sends a request to an MCP server, which executes a specific tool and returns the result. + +## How to Design Good Tools + +The effectiveness of an MCP server depends entirely on the design of its tools. The LLM relies on the tool's name and description to understand its purpose and decide when to use it. Your goal is to make this decision as easy and accurate as possible for the model. + +* **Use Natural Language:** Tool names and descriptions should be clear, descriptive, and human-readable. Avoid jargon or cryptic abbreviations. For example, use `search_customer_by_email` instead of `cust_find_eml`. +* **Write Detailed Descriptions:** The description is your most important documentation. Clearly explain what the tool does, what each parameter is for, and what the tool returns. Mention any important side effects or requirements. +* **Think Like the LLM:** Ask yourself: "If I only read the name and description, would I know exactly when to use this tool and what to expect?" If the answer is no, refine your description. +* **Keep Tools Atomic:** Design tools to perform one specific job well. Instead of a single `manage_files` tool, create smaller, more focused tools like `read_file`, `write_to_file`, and `list_directory_contents`. + +## Getting Started with Development + +You can build an MCP server in any language that can serve an HTTP endpoint or communicate over standard I/O. The official MCP provides SDKs, including [Python](https://github.com/modelcontextprotocol/python-sdk) and [TypeScript](https://github.com/modelcontextprotocol/typescript-sdk) that make development much easier. + +## Important Resources for MCP Development + +When starting with MCP development, it's essential to understand the architecture and operational modes of MCP. Here are some key resources: + +- **MCP Architecture Overview:** Provides a comprehensive understanding of the MCP client-server architecture, including the roles of MCP hosts, clients, and servers. It explains the data and transport layers, which are crucial for effective communication and context exchange. [Model Context Protocol Architecture](https://modelcontextprotocol.io/docs/learn/architecture). + +- **MCP Server Specification:** Details the operational modes of MCP, including local and remote server configurations, and the supported content protocols such as images, text, audio, and binary data. Understanding these specifications is vital for implementing robust MCP servers. [MCP Server Specification](https://modelcontextprotocol.io/specification/2025-06-18/server). + +These documents will help you grasp the foundational concepts and ensure your MCP server implementations are aligned with the protocol standards. + +## Example + +You can check the [mcp_server](/mcp_servers/) folder for a list of examples. E.g. [attio](/mcp_servers/attio/) is written is Typescript and [airtable](/mcp_servers/airtable/) is written in Python. + +## How to Test Your Server + +Testing is a critical part of the contribution process. Your testing must demonstrate that your tools not only work but are also correctly understood and utilized by an AI client. + +### Step 1: Connect to a Client + +First, run your local MCP server and connect it to a client application. Common clients that supports local MCP servers include: + +* **Claude Desktop:** An official desktop application that provides a chat interface for interacting with your server's tools. [Detailed documentations](https://support.anthropic.com/en/articles/10949351-getting-started-with-local-mcp-servers-on-claude-desktop) are available. +* **Cursor:** An AI-first code editor that can be configured to connect to custom MCP servers. [Detailed documentations (click the remote server tab)](https://docs.cursor.com/en/context/mcp#using-mcp-json) are available. +* **Visual Studio Code:** Using specific extensions, you can configure VS Code to act as an MCP client. [Detailed documentations](https://code.visualstudio.com/docs/copilot/chat/mcp-servers#_add-an-mcp-server) are available. +* **Streamable HTTP Client:** For low-level testing, the [steamable_http_client.py](/mcp-clients/src/mcp_clients/streamable_http_client.py) in the Klavis repository is also good for sending raw requests to your server and inspecting its direct responses. + +### Step 2: Test with Natural Language and Document It + +For every tool you add, you must perform an end-to-end test and document the result. + +1. **Formulate a Test Query:** Devise a natural language prompt or question that should cause an AI agent to use your specific tool. For the `read_file`, a good query would be: *"Please read the file at 'docs/readme.md' and summarize it for me."* +2. **Execute and Verify:** Run this query in your client application (like Claude Desktop or Cursor). Confirm that the client correctly identifies and calls your tool with the right parameters, and that the tool returns the expected output. +3. **Record Proof of Correctness:** You are required to document this successful test. **You must record a short video or capture multiple screenshots** that clearly show: + * The natural language query you used. + * The log output showing your tool being called correctly. + * The final, correct result being displayed to the user. + +This recording is mandatory. It provides clear, undeniable proof of functionality for reviewers and serves as living documentation for your contribution. + +## Guidelines for Function Documentation + +For each function you implement, ensure to provide the following details: + +- **AI Prompt Words:** Clearly define the prompt words that the AI should use to invoke the function. +- **Title:** Provide a concise and descriptive title for the function. +- **Parameter Usage:** Explain how each parameter should be used, including whether they are optional and their default values. + +These guidelines will help ensure that your functions are easily understood and correctly utilized by AI clients. \ No newline at end of file diff --git a/README.md b/README.md index d9894d9f..ed8d701b 100644 --- a/README.md +++ b/README.md @@ -1,120 +1,146 @@
- +
-

Open Source, easy to use MCPs šŸš€

+
-[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE) +[![Documentation](https://img.shields.io/badge/Documentation-šŸ“–-green)](https://www.klavis.ai/docs) [![Website](https://img.shields.io/badge/Website-🌐-purple)](https://www.klavis.ai) -[![Discord](https://img.shields.io/discord/1356754991989133462?color=7289DA&label=Community&logo=discord&logoColor=white)](https://discord.com/invite/P6fFgv2w) -[![YouTube Demo](https://img.shields.io/badge/Demo-YouTube-red)](https://www.youtube.com/@KlavisAI-w2l) - -## šŸ“š TL;DR - -Klavis AI is building open source, easy to use MCPs on Slack, Discord and Web. We provide: - -- šŸ’¬ **Slack & Discord & Web Clients**: Run MCPs directly from your favorite messaging platforms -- ā˜ļø **Hosted MCP Servers**: Access powerful tools without infrastructure management -- šŸŽ›ļø **Simple Web UI**: Configure and manage everything with no coding required - -Whether you're a non-technical user wanting to leverage AI workflows or a developer looking to build and scale MCPs, Klavis makes it simple. - -## šŸŽ¬ Watch Our Demo - -See how easy it is to use MCPs (like our Report Generator, YouTube tools, and Document Converters) directly from Slack/Discord: - -[![Klavis AI Demo](https://i3.ytimg.com/vi/9-QQAhrQWw8/maxresdefault.jpg)](https://www.youtube.com/watch?v=9-QQAhrQWw8) - -You can find more case study videos on [our website](https://www.klavis.ai). +[![Discord](https://img.shields.io/badge/Discord-Join-7289DA?logo=discord&logoColor=white)](https://discord.gg/p7TuTEcssn) -## šŸš€ Quick Start +Strata - One MCP server for AI agents to handle thousands of tools | Product Hunt -For detailed MCP client setup instructions, please refer to the platform-specific documentation: -- [**Discord Bot Setup Guide**](mcp_clients/README-Discord.md) - Complete instructions for setting up and running the Discord bot -- [**Slack Bot Setup Guide**](mcp_clients/README-Slack.md) - Step-by-step guide for creating a Slack app and connecting it +
-For detailed MCP server setup instructions, please refer to the README file under each server folder. +## šŸŽÆ Choose Your Solution -- [**Discord**](mcp_servers/discord/README.md): For Discord API integration -- [**Document Conversion (Pandoc)**](mcp_servers/pandoc/README.md): Convert between various file formats using Pandoc -- [**Firecrawl**](mcp_servers/firecrawl/README.md): For web crawling and data collection -- [**Firecrawl Deep Research**](mcp_servers/firecrawl_deep_research/README.md): For deeper web research tasks -- [**GitHub**](mcp_servers/github/README.md): Perform GitHub repository operations -- [**Markitdown**](mcp_servers/markitdown/README.md): Markdown conversion tool -- [**Postgres**](mcp_servers/postgres/README.md): For PostgreSQL database operations -- [**Report Generation**](mcp_servers/report_generation/README.md): Create professional web reports from user query -- [**Resend**](mcp_servers/resend/README.md): For email services -- [**Slack**](mcp_servers/slack/README.md): For Slack API integration -- [**Supabase**](mcp_servers/supabase/README.md): For database operations -- [**YouTube**](mcp_servers/youtube/README.md): Extract and convert YouTube video information +
+ + + + + +
+
+
+

šŸ“¦ Strata

+

off-the-shelf intelligent connectors for your AI agent

+
+
+ + + +
+
+
+
+
+

šŸ› ļø MCP Integrations

+

100+ prebuilt integrations out-of-the-box, with OAuth support

+
+
+ + + +
+
+
+
-## šŸ—ļø Architecture +## Quick Start -Klavis consists of two main components: +### Option 1: Cloud-hosted - [klavis.ai](https://www.klavis.ai) -### MCP Servers +[Quickstart guide →](https://www.klavis.ai/docs/quickstart) -Located in the `mcp_servers/` directory, these service-specific modules expose capabilities as tools: +### Option 2: Self-host -- **Report Generation**: Create professional reports from various data sources -- **YouTube**: Download, analyze, and transform YouTube content -- **Document Conversion**: Convert between various file formats using Pandoc -- **GitHub**: Perform GitHub repository operations -- **Slack**: For Slack API integration -- **Supabase**: For database operations -- **Firecrawl**: For web crawling and data collection -- **Resend**: For email services -- **Postgres**: For PostgreSQL database operations -- **Discord**: For Discord API integration +```bash +# Run any MCP Integration +docker pull ghcr.io/klavis-ai/github-mcp-server:latest +docker run -p 5000:5000 ghcr.io/klavis-ai/github-mcp-server:latest -### MCP Clients +# Install Open Source Strata locally +pipx install strata-mcp +strata add --type stdio playwright npx @playwright/mcp@latest +``` -Located in the `mcp_clients/` directory, these client applications connect to MCP servers and interface with end-user platforms: +### Option 3: SDK -- **Discord Bot**: Interactive AI assistant for Discord -- **Slack Bot**: Interactive AI assistant for Slack -- **Base Client**: Shared functionality for all platform clients +```python +# Python SDK +from klavis import Klavis +from klavis.types import McpServerName -## 🧩 Extending Klavis +klavis = Klavis(api_key="your-key") -### Adding a New Tool +# Create Strata instance +strata = klavis_client.mcp_server.create_strata_server( + user_id="user123", + servers=[McpServerName.GMAIL, McpServerName.SLACK], +) -1. Create a new directory in `mcp_servers/` -2. Implement the MCP server interface -3. Register your tools with appropriate schemas -4. Connect to your client through the standard SSE protocol +# Or use individual MCP servers +gmail = klavis.mcp_server.create_server_instance( + server_name=McpServerName.GMAIL, + user_id="user123", +) +``` -### Adding a New Client Platform +```typescript +// TypeScript SDK +import { KlavisClient, McpServerName } from 'klavis'; -1. Create a new client module in `mcp_clients/` -2. Extend the `base_bot.py` functionality -3. Implement platform-specific message handling -4. Connect to MCP servers using `mcp_client.py` +const klavis = new KlavisClient({ apiKey: 'your-api-key' }); -## šŸ¤ Contributing +// Create Strata instance +const strata = await klavis.mcpServer.createStrataServer({ + userId: "user123", + servers: [Klavis.McpServerName.Gmail, Klavis.McpServerName.Slack], +}); -We love contributions! Join our [Discord community](https://discord.gg/cVNXvzs5) to discuss ideas and get help. +// Or use individual MCP servers +const gmail = await klavis.mcpServer.createServerInstance({ + serverName: McpServerName.GMAIL, + userId: "user123" +}); +``` -## šŸ“š Citation +### Option 4: REST API + + +```bash +# Create Strata server +curl -X POST "https://api.klavis.ai/v1/mcp-server/strata" \ + -H "Authorization: Bearer your-api-key" \ + -H "Content-Type: application/json" \ + -d '{ + "user_id": "user123", + "servers": ["GMAIL", "SLACK"] + }' + +# Create individual MCP server +curl -X POST "https://api.klavis.ai/v1/mcp-server/instance" \ + -H "Authorization: Bearer your-api-key" \ + -H "Content-Type: application/json" \ + -d '{ + "server_name": "GMAIL", + "user_id": "user123" + }' +``` -If you use Klavis in your research or project, please cite: -```bibtex -@software{klavis2024, - author = {Klavis AI}, - title = {Klavis: Open-Source Infrastructure for Model Context Protocols}, - year = {2024}, - publisher = {GitHub}, - url = {https://github.com/klavis-ai/klavis} -} -``` +## Resources -## šŸ“œ License +- šŸ“– [Documentation](https://www.klavis.ai/docs) +- šŸ’¬ [Discord Community](https://discord.gg/p7TuTEcssn) +- šŸ› [Report Issues](https://github.com/klavis-ai/klavis/issues) +- 🌐 [Klavis AI Website](https://www.klavis.ai) -This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. +---
-šŸ™ Thanks for checking out Klavis AI! We're excited to hear your thoughts and build this with the community. -
\ No newline at end of file +

Made with ā¤ļø by the Klavis Team

+ \ No newline at end of file diff --git a/_oauth_support/README.md b/_oauth_support/README.md new file mode 100644 index 00000000..c6e5b064 --- /dev/null +++ b/_oauth_support/README.md @@ -0,0 +1,112 @@ +# OAuth Support Layer for MCP Servers + +This directory provides OAuth authentication support for MCP servers that require external service authentication. + +## How it Works + +### 1. Wrapper Architecture +- **Base Image**: Original MCP server container (e.g., `github-mcp-server`) +- **OAuth Wrapper**: Adds OAuth authentication layer on top of base image +- **Transparent Proxy**: OAuth layer handles authentication, then transparently starts the original MCP server + +### 2. Components + +#### `docker/Dockerfile.template` +- Builds OAuth wrapper image based on base image +- Installs authentication scripts and dependencies +- Replaces container entry point + +#### `docker/entrypoint_wrapper.sh` +- Container startup entry point +- Parses command line arguments (server name and original command) +- Calls OAuth authentication script +- Executes original MCP server command + +#### `oauth_acquire.sh` +- Core OAuth authentication logic +- Calls Klavis API to create authentication instance +- Shows authentication URL to user +- Polls and waits for user to complete authentication +- Sets `AUTH_DATA` environment variable + +#### `server_name.json` +- Maps internal server names to display names +- Used by GitHub Actions to determine which servers need OAuth +- Example: `"github": "GitHub"` + +### 3. Authentication Flow + +``` +1. Container starts → entrypoint_wrapper.sh +2. Calls oauth_acquire.sh +3. Requests Klavis API to create OAuth instance +4. Displays authentication URL šŸ”—šŸ”—šŸ”— +5. User completes authentication in browser +6. Polls to check authentication status +7. Gets AUTH_DATA and sets environment variable +8. Starts original MCP server (with auth info) +``` + +### 4. GitHub Actions Integration + +#### Build Process: +1. **Build Base Image** - Original MCP server (tagged with `latest` and commit SHA) +2. **Check OAuth Requirements** - Query `server_name.json` +3. **Extract Container Commands** - Use `podman inspect` to get original entry point +4. **Build OAuth Image** - If OAuth needed, build wrapper version +5. **Push Images** - Push OAuth version with multiple tags: + - `{commit-sha}-oauth` - OAuth version with specific commit + - `latest` - **OAuth version becomes the default latest tag** + +#### Conditional Building: +- Only servers listed in `server_name.json` get OAuth versions built +- Uses server display name as OAuth service name + +### 5. Environment Variables + +- `KLAVIS_API_KEY`: Klavis API key (required for OAuth flow) +- `AUTH_DATA`: OAuth authentication data (set by script, used by MCP server) +- `SKIP_OAUTH`: Set to `true` to bypass OAuth authentication entirely (default: `false`) + +### 6. Usage + +#### Standard OAuth Flow +```bash +# Run OAuth version of GitHub MCP server (latest tag now points to OAuth version) +docker run -it -e KLAVIS_API_KEY=your_key_here \ + ghcr.io/klavis-ai/github-mcp-server:latest + +# Or explicitly use a specific commit SHA +docker run -it -e KLAVIS_API_KEY=your_key_here \ + ghcr.io/klavis-ai/github-mcp-server:abc1234 +``` + +Users will see the authentication URL, and after completing authentication, the MCP server will start automatically with GitHub API access. + +**Note**: For servers with OAuth support, the `latest` tag now points to the OAuth-enabled version by default. To access the original non-OAuth version, use a specific commit SHA tag (e.g., `ghcr.io/klavis-ai/github-mcp-server:abc1234`). + +#### Direct Execution (Skip OAuth) +```bash +# Run MCP server directly without OAuth authentication (using latest tag) +docker run -it -e SKIP_OAUTH=true \ + ghcr.io/klavis-ai/github-mcp-server:latest + +# Run with pre-existing auth data (skips OAuth flow but uses provided credentials) +docker run -it -e SKIP_OAUTH=true \ + -e AUTH_DATA='{"access_token":"your_token_here"}' \ + ghcr.io/klavis-ai/github-mcp-server:latest +``` + +This bypasses the OAuth layer entirely and starts the MCP server directly. + +**Important**: Even when `SKIP_OAUTH=true`, if an `AUTH_DATA` environment variable exists, it will still be written to the `.env` file. This allows you to: +- Use pre-existing authentication data without going through the OAuth flow +- Test with manually provided credentials +- Run in development environments with cached auth data + +Useful for: +- Testing without authentication +- Running with pre-configured credentials +- Development environments +- Services that don't require OAuth authentication +- Using cached or manually provided `AUTH_DATA` diff --git a/_oauth_support/docker/Dockerfile.template b/_oauth_support/docker/Dockerfile.template new file mode 100644 index 00000000..02021e92 --- /dev/null +++ b/_oauth_support/docker/Dockerfile.template @@ -0,0 +1,30 @@ +# OAuth Support Layer Dockerfile Template +# This template adds OAuth token acquisition capabilities to existing MCP servers + +ARG BASE_IMAGE +FROM ${BASE_IMAGE} + +# Install required dependencies for OAuth support +USER root + +RUN if command -v apt-get >/dev/null 2>&1; then \ + apt-get update && apt-get install -y curl bash jq coreutils && apt-get clean; \ + elif command -v apk >/dev/null 2>&1; then \ + apk add --no-cache curl bash jq coreutils; \ + elif command -v yum >/dev/null 2>&1; then \ + yum install -y curl bash jq coreutils && yum clean all; \ + else \ + echo "Warning: Could not install dependencies. No support package manager found."; \ + fi + +# Copy OAuth support scripts and server name mapping +COPY ./oauth_acquire.sh /klavis_oauth/oauth_acquire.sh +COPY ./docker/entrypoint_wrapper.sh /klavis_oauth/entrypoint_wrapper.sh + +# Make scripts executable +RUN chmod +x /klavis_oauth/oauth_acquire.sh && \ + chmod +x /klavis_oauth/entrypoint_wrapper.sh + +# Set our wrapper as the new entrypoint +ENTRYPOINT ["/klavis_oauth/entrypoint_wrapper.sh", "--server-name", "${MCP_SERVER_NAME}", "--exec"] +CMD ${ENTRYPOINT_COMMAND} diff --git a/_oauth_support/docker/entrypoint_wrapper.sh b/_oauth_support/docker/entrypoint_wrapper.sh new file mode 100755 index 00000000..3fafbbee --- /dev/null +++ b/_oauth_support/docker/entrypoint_wrapper.sh @@ -0,0 +1,78 @@ +#!/bin/bash + +# Entrypoint Wrapper Script +# This script wraps the original MCP server entrypoint to add OAuth support + +set -e + +# Parse arguments +SERVER_NAME="" +EXEC_COMMAND=() + +# Check environment variable to skip OAuth (default: false) +SKIP_OAUTH="${SKIP_OAUTH:-false}" + +while [[ $# -gt 0 ]]; do + case $1 in + --server-name) + SERVER_NAME="$2" + shift 2 + ;; + --exec) + shift + # Collect all remaining arguments as the exec command + EXEC_COMMAND=("$@") + break + ;; + *) + echo "Unknown option: $1" + exit 1 + ;; + esac +done + +echo "OAuth Support Layer - Entrypoint Wrapper" +echo "=========================================" +echo "Server Name: $SERVER_NAME" +echo "Exec Command: ${EXEC_COMMAND[*]}" +echo "Skip OAuth: $SKIP_OAUTH" + +if [[ "$SKIP_OAUTH" == "true" ]]; then + echo "Skipping OAuth authentication - executing command directly" + echo "=========================================" +else + # Execute OAuth token acquisition if needed + echo "Executing OAuth token acquisition..." + cd /klavis_oauth + if ! source ./oauth_acquire.sh "$SERVER_NAME"; then + oauth_exit_code=$? + echo "OAuth token acquisition failed" + exit $oauth_exit_code + fi + + cd - >/dev/null && echo "Back to work folder: $(pwd)" +fi + +# Add AUTH_DATA to .env if it exists +if [[ -n "$AUTH_DATA" ]]; then + echo "AUTH_DATA=$AUTH_DATA" >> .env + echo "Added AUTH_DATA to .env file" +fi +echo "Executing command: ${EXEC_COMMAND[*]}" +echo "=========================================" + +# Display Klavis AI Logo before starting the server with rainbow colors +echo "" +printf " \033[1;31mā–ˆā–ˆā•— ā–ˆā–ˆā•—\033[1;33mā–ˆā–ˆā•— \033[1;32mā–ˆā–ˆā–ˆā–ˆā–ˆā•— \033[1;36mā–ˆā–ˆā•— ā–ˆā–ˆā•—\033[1;34mā–ˆā–ˆā•—\033[1;35mā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•— \033[1;91mā–ˆā–ˆā–ˆā–ˆā–ˆā•— \033[1;93mā–ˆā–ˆā•—\033[0m\n" +printf " \033[1;31mā–ˆā–ˆā•‘ ā–ˆā–ˆā•”ā•\033[1;33mā–ˆā–ˆā•‘ \033[1;32mā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•—\033[1;36mā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘\033[1;34mā–ˆā–ˆā•‘\033[1;35mā–ˆā–ˆā•”ā•ā•ā•ā•ā• \033[1;91mā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•—\033[1;93mā–ˆā–ˆā•‘\033[0m\n" +printf " \033[1;31mā–ˆā–ˆā–ˆā–ˆā–ˆā•”ā• \033[1;33mā–ˆā–ˆā•‘ \033[1;32mā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•‘\033[1;36mā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘\033[1;34mā–ˆā–ˆā•‘\033[1;35mā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•— \033[1;91mā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•‘\033[1;93mā–ˆā–ˆā•‘\033[0m\n" +printf " \033[1;31mā–ˆā–ˆā•”ā•ā–ˆā–ˆā•— \033[1;33mā–ˆā–ˆā•‘ \033[1;32mā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•‘\033[1;36mā•šā–ˆā–ˆā•— ā–ˆā–ˆā•”ā•\033[1;34mā–ˆā–ˆā•‘\033[1;35mā•šā•ā•ā•ā•ā–ˆā–ˆā•‘ \033[1;91mā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•‘\033[1;93mā–ˆā–ˆā•‘\033[0m\n" +printf " \033[1;31mā–ˆā–ˆā•‘ ā–ˆā–ˆā•—\033[1;33mā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•—\033[1;32mā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘ \033[1;36mā•šā–ˆā–ˆā–ˆā–ˆā•”ā• \033[1;34mā–ˆā–ˆā•‘\033[1;35mā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•‘ \033[1;91mā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘\033[1;93mā–ˆā–ˆā•‘\033[0m\n" +printf " \033[1;31mā•šā•ā• ā•šā•ā•\033[1;33mā•šā•ā•ā•ā•ā•ā•ā•\033[1;32mā•šā•ā• ā•šā•ā• \033[1;36mā•šā•ā•ā•ā• \033[1;34mā•šā•ā•\033[1;35mā•šā•ā•ā•ā•ā•ā•ā• \033[1;91mā•šā•ā• ā•šā•ā•\033[1;93mā•šā•ā•\033[0m\n" +echo "" +printf "\033[1;32m Empowering AI with Seamless Integration\033[0m\n" +printf "\033[1;33m Starting MCP Server...\033[0m\n" +echo "" + +# Execute the original command with all arguments and environment variables preserved +exec "${EXEC_COMMAND[@]}" diff --git a/_oauth_support/oauth_acquire.sh b/_oauth_support/oauth_acquire.sh new file mode 100755 index 00000000..516aa1f8 --- /dev/null +++ b/_oauth_support/oauth_acquire.sh @@ -0,0 +1,92 @@ +#!/bin/bash + +# OAuth Token Acquisition Script +# This script handles the OAuth token acquisition process for MCP servers + +set -e + +SERVER_NAME="$1" + +echo "Starting OAuth token acquisition process for: $SERVER_NAME" + +# Check if AUTH_DATA environment variable exists and is empty +if [ -z "${AUTH_DATA:-}" ]; then + echo "AUTH_DATA is not set or empty. Starting OAuth flow for $SERVER_NAME..." + + # Check for required KLAVIS_API_KEY + if [ -z "${KLAVIS_API_KEY:-}" ]; then + echo "Error: KLAVIS_API_KEY environment variable must be set for OAuth flow" + echo "Please set your Klavis API key: https://www.klavis.ai/home/api-keys" + return 128 + fi + + # Step 1: Create OAuth instance + echo "Creating OAuth instance for $SERVER_NAME..." + INSTANCE_RESPONSE=$(curl --silent --request POST \ + --url https://api.klavis.ai/mcp-server/self-hosted/instance/create \ + --header "Authorization: Bearer $KLAVIS_API_KEY" \ + --header 'Content-Type: application/json' \ + --data "{\"serverName\": \"$SERVER_NAME\", \"userId\": \"local_mcp_server\"}") + + INSTANCE_ID=$(echo "$INSTANCE_RESPONSE" | jq -r '.instanceId') + OAUTH_URL=$(echo "$INSTANCE_RESPONSE" | jq -r '.oauthUrl') + + if [ "$INSTANCE_ID" = "null" ] || [ "$OAUTH_URL" = "null" ]; then + echo "Error: Failed to create OAuth instance" + echo "Response: $INSTANCE_RESPONSE" + return 1 + fi + + # Check if already authorized first, then show URL if needed + echo "Checking authorization status..." + + # Poll for auth completion with 10 minute timeout + MSG_FILE=$(mktemp) + + timeout 600 bash -c " + FIRST_CHECK=true + while true; do + AUTH_RESPONSE=\$(curl --silent --request GET \\ + --url \"https://api.klavis.ai/mcp-server/instance/get-auth/$INSTANCE_ID/" \\ + --header \"Authorization: Bearer $KLAVIS_API_KEY\") + + SUCCESS=\$(echo \"\$AUTH_RESPONSE\" | jq -r '.success') + + if [ \"\$SUCCESS\" = \"true\" ]; then + AUTH_DATA_JSON=\$(echo \"\$AUTH_RESPONSE\" | jq -r '.authData') + echo \"\$AUTH_DATA_JSON\" > \"$MSG_FILE\" + exit 0 + elif [ \"\$FIRST_CHECK\" = \"true\" ]; then + echo \"\" + printf \"\033[1;33m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\033[0m\n\" + printf \"\033[1;32mšŸ”— Please click the link below to authorize access:\033[0m\n\" + printf \"\033[1;33m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\033[0m\n\" + echo \"\" + printf \"\033[1;36m%s\033[0m\n\" \"$OAUTH_URL\" + echo \"\" + printf \"\033[1;33m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\033[0m\n\" + echo \"\" + echo \"Waiting for authorization (timeout: 10 minutes)...\" + FIRST_CHECK=false + fi + sleep 1 + done + " + + AUTH_DATA=$(cat "$MSG_FILE") + AUTH_DATA=$(echo $AUTH_DATA | jq -c) + rm -f "$MSG_FILE" + TIMEOUT_EXIT_CODE=$? + + if [ $TIMEOUT_EXIT_CODE -eq 124 ]; then + echo "Timeout: OAuth authorization was not completed within 5 minutes" + return 1 + elif [ $TIMEOUT_EXIT_CODE -eq 0 ] && [ -f "$MSG_FILE" ]; then + export AUTH_DATA + echo "OAuth token acquisition completed successfully for $SERVER_NAME" + fi +else + echo "AUTH_DATA already exists, skipping OAuth flow for $SERVER_NAME" +fi + +echo "OAuth preparation complete for $SERVER_NAME" diff --git a/_oauth_support/server_name.json b/_oauth_support/server_name.json new file mode 100644 index 00000000..ec80420f --- /dev/null +++ b/_oauth_support/server_name.json @@ -0,0 +1,36 @@ +{ + "_comment": [ + "Server name mapping, it's for GitHub Action use", + "This file maps internal server names to their display names", + "Only includes servers that support OAuth authentication" + ], + "airtable": "Airtable", + "asana": "Asana", + "attio": "Attio", + "clickup": "ClickUp", + "close": "Close", + "confluence": "Confluence", + "dropbox": "Dropbox", + "github": "GitHub", + "gmail": "Gmail", + "gong": "Gong", + "google_calendar": "Google Calendar", + "google_docs": "Google Docs", + "google_drive": "Google Drive", + "google_sheets": "Google Sheets", + "hubspot": "HubSpot", + "jira": "Jira", + "linear": "Linear", + "linkedin": "LinkedIn", + "monday": "Monday", + "moneybird": "Moneybird", + "notion": "Notion", + "onedrive": "Onedrive", + "outlook": "Outlook", + "quickbooks": "QuickBooks", + "salesforce": "Salesforce", + "shopify": "Shopify", + "slack": "Slack", + "supabase": "Supabase", + "wordpress": "WordPress" +} diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 00000000..1800114d --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,174 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +#uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc \ No newline at end of file diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 00000000..5bcaefb9 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,32 @@ +# Mintlify Starter Kit + +Click on `Use this template` to copy the Mintlify starter kit. The starter kit contains examples including + +- Guide pages +- Navigation +- Customizations +- API Reference pages +- Use of popular components + +### Development + +Install the [Mintlify CLI](https://www.npmjs.com/package/mintlify) to preview the documentation changes locally. To install, use the following command + +``` +npm i -g mintlify +``` + +Run the following command at the root of your documentation (where docs.json is) + +``` +mintlify dev +``` + +### Publishing Changes + +Install our Github App to auto propagate changes from your repo to your deployment. Changes will be deployed to production automatically after pushing to the default branch. Find the link to install on your dashboard. + +#### Troubleshooting + +- Mintlify dev isn't running - Run `mintlify install` it'll re-install dependencies. +- Page loads as a 404 - Make sure you are running in a folder with `docs.json` \ No newline at end of file diff --git a/docs/ai-platform-integration/claude.mdx b/docs/ai-platform-integration/claude.mdx new file mode 100644 index 00000000..69080b2e --- /dev/null +++ b/docs/ai-platform-integration/claude.mdx @@ -0,0 +1,290 @@ +--- +title: "Claude" +description: "Learn how to build AI agents that integrate Anthropic's Claude with Strata MCP servers to build AI agents that can interact with Gmail and Slack." +--- + +[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/klavis-ai/klavis/blob/main/examples/claude/Use_Klavis_with_Claude.ipynb) + +## Prerequisites + +Before we begin, you'll need: + + + + Get your API key from Anthropic Console + + + Get your API key from Klavis AI + + + +## Installation + +First, install the required packages: + + + +```bash Python +pip install anthropic klavis +``` + +```bash TypeScript +npm install @anthropic-ai/sdk klavis +``` + + + +## Setup Environment Variables + + + +```python Python +import os + +os.environ["ANTHROPIC_API_KEY"] = "YOUR_ANTHROPIC_API_KEY" # Replace with your actual Anthropic API key +os.environ["KLAVIS_API_KEY"] = "YOUR_KLAVIS_API_KEY" # Replace with your actual Klavis API key +``` + +```typescript TypeScript +import Anthropic from '@anthropic-ai/sdk'; +import { KlavisClient, Klavis } from 'klavis'; + +// Set environment variables +process.env.ANTHROPIC_API_KEY = "YOUR_ANTHROPIC_API_KEY"; // Replace with your actual Anthropic API key +process.env.KLAVIS_API_KEY = "YOUR_KLAVIS_API_KEY"; // Replace with your actual Klavis API key +``` + + + +### Step 1 - Create Strata MCP Server with Gmail and Slack + + + +```python Python +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat +import webbrowser + +klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + +response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GMAIL, McpServerName.SLACK], + user_id="1234" +) + +# Handle OAuth authorization for each services +if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + print(f"Or please open this URL to complete {server_name} OAuth authorization: {oauth_url}") +``` + +```typescript TypeScript +const klavisClient = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY }); + +const response = await klavisClient.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Gmail, Klavis.McpServerName.Slack], + userId: "1234" +}); + +// Handle OAuth authorization for each services +if (response.oauthUrls) { + for (const [serverName, oauthUrl] of Object.entries(response.oauthUrls)) { + window.open(oauthUrl); + // Wait for user to complete OAuth + await new Promise(resolve => { + const input = prompt(`Press OK after completing ${serverName} OAuth authorization...`); + resolve(input); + }); + } +} + +``` + + + + +**OAuth Authorization Required**: The code above will open browser windows for each service. Click through the OAuth flow to authorize access to your accounts. + + +### Step 2 - Create method to use MCP Server with Claude + +This method handles multiple rounds of tool calls until a final response is ready, allowing the AI to chain tool executions for complex tasks. + + + +```python Python +import json +from anthropic import Anthropic + +def claude_with_mcp_server(mcp_server_url: str, user_query: str): + claude_client = Anthropic(api_key=os.getenv("ANTHROPIC_API_KEY")) + + messages = [ + {"role": "user", "content": f"{user_query}"} + ] + + mcp_server_tools = klavis_client.mcp_server.list_tools( + server_url=mcp_server_url, + format=ToolFormat.ANTHROPIC + ) + + max_iterations = 10 + iteration = 0 + + while iteration < max_iterations: + iteration += 1 + + response = claude_client.messages.create( + model="claude-sonnet-4-5-20250929", + max_tokens=4000, + system="You are a helpful assistant. Use the available tools to answer the user's question.", + messages=messages, + tools=mcp_server_tools.tools + ) + + messages.append({"role": "assistant", "content": response.content}) + + if response.stop_reason == "tool_use": + tool_results = [] + + for content_block in response.content: + if content_block.type == "tool_use": + function_name = content_block.name + function_args = content_block.input + + print(f"šŸ”§ Calling: {function_name}, with args: {function_args}") + + result = klavis_client.mcp_server.call_tools( + server_url=mcp_server_url, + tool_name=function_name, + tool_args=function_args + ) + + tool_results.append({ + "type": "tool_result", + "tool_use_id": content_block.id, + "content": str(result) + }) + + messages.append({"role": "user", "content": tool_results}) + continue + else: + return response.content[0].text + + return "Max iterations reached without final response" +``` + +```typescript TypeScript +async function claudeWithMcpServer(mcpServerUrl: string, userQuery: string) { + const claudeClient = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY }); + + const messages = [ + { role: "user", content: userQuery } + ]; + + const mcpServerTools = await klavisClient.mcpServer.listTools({ + serverUrl: mcpServerUrl, + format: Klavis.ToolFormat.Anthropic + }); + + const maxIterations = 10; + let iteration = 0; + + while (iteration < maxIterations) { + iteration++; + + const response = await claudeClient.messages.create({ + model: "claude-sonnet-4-5-20250929", + max_tokens: 4000, + system: "You are a helpful assistant. Use the available tools to answer the user's question.", + messages: messages, + tools: mcpServerTools.tools + }); + + messages.push({ role: "assistant", content: response.content }); + + if (response.stop_reason === "tool_use") { + const toolResults = []; + + for (const contentBlock of response.content) { + if (contentBlock.type === "tool_use") { + const functionName = contentBlock.name; + const functionArgs = contentBlock.input; + + console.log(`šŸ”§ Calling: ${functionName}, with args:`, functionArgs); + + const result = await klavisClient.mcpServer.callTools({ + serverUrl: mcpServerUrl, + toolName: functionName, + toolArgs: functionArgs + }); + + toolResults.push({ + type: "tool_result", + tool_use_id: contentBlock.id, + content: JSON.stringify(result) + }); + } + } + + messages.push({ role: "user", content: toolResults }); + continue; + } else { + return response.content[0].text; + } + } + + return "Max iterations reached without final response"; +} +``` + + + +### Step 3 - Run! + + + +```python Python +result = claude_with_mcp_server( + mcp_server_url=response.strata_server_url, + user_query="Check my latest 5 emails and summarize them in a Slack message to #general" +) + +print(f"\nšŸ¤– Final Response: {result}") +``` + +```typescript TypeScript +result = await claudeWithMcpServer( + response.strataServerUrl, + "Check my latest emails and summarize them in a Slack message to #updates" +); + +console.log(`\nšŸ¤– Final Response: ${result}`); +``` + + + + +Perfect! You've integrated Claude with Klavis MCP servers. + + +## Next Steps + + + + Explore available MCP servers + + + REST endpoints and schemas + + + + +## Useful Resources + +- [Anthropic API Documentation](https://docs.anthropic.com/) +- [Claude API Reference](https://docs.anthropic.com/en/api/messages) +- [MCP Protocol Specification](https://modelcontextprotocol.io/) + +**Happy building!** šŸš€ \ No newline at end of file diff --git a/docs/ai-platform-integration/crewai.mdx b/docs/ai-platform-integration/crewai.mdx new file mode 100644 index 00000000..7f39b847 --- /dev/null +++ b/docs/ai-platform-integration/crewai.mdx @@ -0,0 +1,390 @@ +--- +title: "CrewAI" +description: "Build powerful AI agent crews that integrate with Strata MCP servers to build AI agents that can interact with Gmail and Slack." +--- + +## Partnership + +CrewAI has officially showcased their integration with Klavis AI in [this LinkedIn post](https://www.linkedin.com/feed/update/urn:li:activity:7346573584267395072/), demonstrating how to build powerful AI agent crews that can automate complex workflows across multiple platforms. + + + CrewAI and Klavis Integration - Automate your next sales follow-up + + +## Prerequisites + +Before we begin, you'll need: + + + + Get your API key from OpenAI Platform (CrewAI uses OpenAI as the default model) + + + Get your API key from Klavis AI + + + +## Installation + +First, install the required packages: + + + +```bash Python +pip install crewai 'crewai-tools[mcp]' klavis openai +``` + +```bash TypeScript +npm install crewai crewai-tools klavis openai +``` + + + +## Setup Environment Variables + + + +```python Python +import os + +# Set environment variables +os.environ["OPENAI_API_KEY"] = "your-openai-api-key-here" # Replace with your actual OpenAI API key +os.environ["KLAVIS_API_KEY"] = "your-klavis-api-key-here" # Replace with your actual Klavis API key +``` + +```typescript TypeScript +// Set environment variables in your .env file +process.env.OPENAI_API_KEY = "your-openai-api-key-here"; // Replace with your actual OpenAI API key +process.env.KLAVIS_API_KEY = "your-klavis-api-key-here"; // Replace with your actual Klavis API key +``` + + + +## CrewAI with MCP Integration + +CrewAI allows you to create specialized AI agent crews where each agent can have access to different MCP tools. This enables sophisticated multi-agent workflows that can: + +1. **Create MCP Instances**: Set up connections to external services +2. **Specialized Agents**: Each agent focuses on specific tasks with relevant tools +3. **Collaborative Workflows**: Agents work together in sequential or parallel processes +4. **Tool Discovery**: Automatically discover available tools from MCP servers +5. **Smart Coordination**: CrewAI manages task dependencies and agent collaboration + +## Crew AI + Klavis Strata + +Create a crew agent that helps in assisting user queries using Strata Server + +### Step 1 - Create Strata MCP Server with Gmail and Slack + + + +```python Python +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat +import webbrowser + +klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + +response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GMAIL, McpServerName.SLACK], + user_id="1234" +) + +# Handle OAuth authorization for each services +if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + print(f"Or please open this URL to complete {server_name} OAuth authorization: {oauth_url}") +``` + +```typescript TypeScript +const klavisClient = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY }); + +const response = await klavisClient.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Gmail, Klavis.McpServerName.Slack], + userId: "1234" +}); + +// Handle OAuth authorization for each services +if (response.oauthUrls) { + for (const [serverName, oauthUrl] of Object.entries(response.oauthUrls)) { + window.open(oauthUrl); + // Wait for user to complete OAuth + await new Promise(resolve => { + const input = prompt(`Press OK after completing ${serverName} OAuth authorization...`); + resolve(input); + }); + } +} + +``` + + + + +**OAuth Authorization Required**: The code above will open browser windows for each service. Click through the OAuth flow to authorize access to your accounts. + + +### Step 2 - Create method to use MCP Server with Crew AI + +This method handles multiple rounds of tool calls until a final response is ready, allowing the AI to chain tool executions for complex tasks. + + + +```python Python +import json +from crewai import Agent, Task, Crew, Process +from crewai_tools import MCPServerAdapter + +def crew_with_mcp_server(mcp_server_url: str, user_query: str): + klavis_server_params = [ + { + "url": mcp_server_url, + "transport": "streamable-http" + } + ] + + with MCPServerAdapter(klavis_server_params) as all_mcp_tools: + print(f"Available tools: {[tool.name for tool in all_mcp_tools]}") + + klavis_agent = Agent( + role="Klavis Query Assistant", + goal="Assist the user with their query using available tools", + backstory="Expert at assisting users with their queries using available tools", + tools=all_mcp_tools, + verbose=False, + llm="gpt-4o" + ) + + klavis_task = Task( + description=f"Answer the user's query: {user_query}", + expected_output="Provide a detailed response to the user's query", + agent=klavis_agent + ) + + crew = Crew( + agents = [klavis_agent], + tasks = [klavis_task], + process=Process.sequential, + verbose=True + ) + + result = crew.kickoff() + return result +``` + +```typescript TypeScript +import { Agent, Task, Crew, Process } from 'crewai'; +import { MCPServerAdapter } from 'crewai-tools'; + +async function crewWithMcpServer(mcpServerUrl: string, userQuery: string) { + const klavisServerParams = [ + { + url: mcpServerUrl, + transport: "streamable-http" + } + ]; + + const mcpAdapter = new MCPServerAdapter(klavisServerParams); + const allMcpTools = await mcpAdapter.getTools(); + + console.log(`Available tools: ${allMcpTools.map(tool => tool.name)}`); + + const klavisAgent = new Agent({ + role: "Klavis Query Assistant", + goal: "Assist the user with their query using available tools", + backstory: "Expert at assisting users with their queries using available tools", + tools: allMcpTools, + verbose: false, + llm: "gpt-4o" + }); + + const klavisTask = new Task({ + description: `Answer the user's query: ${userQuery}`, + expectedOutput: "Provide a detailed response to the user's query", + agent: klavisAgent + }); + + const crew = new Crew({ + agents: [klavisAgent], + tasks: [klavisTask], + process: Process.Sequential, + verbose: true + }); + + const result = await crew.kickoff(); + return result; +} +``` + + + +### Step 3 - Run! + + + +```python Python +result = crew_with_mcp_server( + mcp_server_url=response.strata_server_url, + user_query="Check my latest 5 emails and summarize them in a Slack message to #general" +) + +print(f"\nFinal Response: {result}") +``` + +```typescript TypeScript +result = await crewWithMcpServer( + response.strataServerUrl, + "Check my latest emails and summarize them in a Slack message to #updates" +); + +console.log(`\nFinal Response: ${result}`); +``` + + + + +Perfect! You've integrated Crew with Strata MCP servers. + + +## Security Best Practices + +When using CrewAI with Klavis MCP servers, follow these security guidelines: + + + +```python Python +def create_secure_crew(): + """Demonstrates secure MCP server integration with CrewAI""" + + # 1. Use environment variables for sensitive data + api_key = os.getenv("KLAVIS_API_KEY") + if not api_key: + raise ValueError("KLAVIS_API_KEY environment variable is required") + + # 2. Validate server URLs (use HTTPS in production) + server_params = [{ + "url": server_instance.server_url, + "transport": "streamable-http" + }] + + # 3. Always use context managers for proper resource cleanup + try: + with MCPServerAdapter(server_params) as mcp_tools: + # 4. Validate available tools before use + if not mcp_tools: + raise ValueError("No tools available from MCP server") + + print(f"āœ… Securely connected with {len(mcp_tools)} tools") + + # 5. Create agents with limited scope + agent = Agent( + role="Data Analyst", + goal="Analyze data within defined parameters", + backstory="You operate within strict security guidelines.", + tools=mcp_tools, + reasoning=False, # Disable for production + verbose=False # Disable verbose logging in production + ) + + return agent + + except Exception as e: + print(f"šŸ”’ Security check failed: {e}") + return None + +# Example usage +secure_agent = create_secure_crew() +if secure_agent: + print("āœ… Secure crew created successfully") +``` + +```typescript TypeScript +function createSecureCrew() { + // 1. Use environment variables for sensitive data + const apiKey = process.env.KLAVIS_API_KEY; + if (!apiKey) { + throw new Error("KLAVIS_API_KEY environment variable is required"); + } + + // 2. Validate server URLs (use HTTPS in production) + const serverParams = [{ + url: serverInstance.serverUrl, + transport: "streamable-http" + }]; + + // 3. Always handle errors properly + try { + // 4. Validate available tools before use + const mcpTools = new MCPServerAdapter(serverParams); + if (!mcpTools) { + throw new Error("No tools available from MCP server"); + } + + console.log(`āœ… Securely connected with tools`); + + // 5. Create agents with limited scope + const agent = new Agent({ + role: "Data Analyst", + goal: "Analyze data within defined parameters", + backstory: "You operate within strict security guidelines.", + tools: mcpTools, + reasoning: false, // Disable for production + verbose: false // Disable verbose logging in production + }); + + return agent; + + } catch (error) { + console.error(`šŸ”’ Security check failed: ${error}`); + return null; + } +} + +// Example usage +const secureAgent = createSecureCrew(); +if (secureAgent) { + console.log("āœ… Secure crew created successfully"); +} +``` + + + +## Available MCP Servers + +CrewAI works with all Klavis MCP servers. Here are some popular options: + + + + Gmail, Slack, Discord, Outlook + + + YouTube, Notion, Google Docs, WordPress + + + GitHub, Jira, Linear, Confluence + + + Google Sheets, Supabase, PostgreSQL + + + Salesforce, HubSpot, Asana, ClickUp + + + Google Drive, Dropbox, OneDrive + + + +## Summary + +CrewAI + Klavis integration enables you to build sophisticated multi-agent AI systems with real-world capabilities. Key benefits include: + +### šŸš€ **CrewAI + Klavis Benefits:** + +- **Seamless Integration**: MCPServerAdapter makes MCP connection effortless +- **Agent Specialization**: Each agent can focus on specific domains +- **Scalable Architecture**: Easy to add more agents and MCP servers +- **Professional AI Teams**: Create sophisticated multi-agent systems +- **Real-World Impact**: Connect AI to actual business tools and services + +**Ready to build your first AI crew?** Start with a simple agent and expand from there! šŸš€šŸ‘„ \ No newline at end of file diff --git a/docs/ai-platform-integration/fireworks-ai.mdx b/docs/ai-platform-integration/fireworks-ai.mdx new file mode 100644 index 00000000..75d29f5d --- /dev/null +++ b/docs/ai-platform-integration/fireworks-ai.mdx @@ -0,0 +1,394 @@ +--- +title: "Fireworks AI" +description: "Learn how to build AI agents that integrate Fireworks AI's LLMs with Klavis MCP Servers" +--- + +## Prerequisites + +Before we begin, you'll need: + + + + Get your API key from Fireworks AI + + + Get your API key from Klavis AI + + + +## Installation + +First, install the required packages: + + + +```bash Python +pip install fireworks-ai klavis +``` + +```bash TypeScript +npm install fireworks-ai klavis +``` + + + +## Setup Environment Variables + + + +```python Python +import os + +# Set environment variables +os.environ["FIREWORKS_API_KEY"] = "your-fireworks-api-key-here" # Replace with your actual Fireworks API key +os.environ["KLAVIS_API_KEY"] = "your-klavis-api-key-here" # Replace with your actual Klavis API key +``` + +```typescript TypeScript +// Set environment variables in your .env file +process.env.FIREWORKS_API_KEY = "your-fireworks-api-key-here"; // Replace with your actual Fireworks API key +process.env.KLAVIS_API_KEY = "your-klavis-api-key-here"; // Replace with your actual Klavis API key +``` + + + +## Basic Setup + + + +```python Python +import os +import json +from fireworks.client import Fireworks +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat + +# Initialize clients +fireworks_client = Fireworks(api_key=os.getenv("FIREWORKS_API_KEY")) +klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) +``` + +```typescript TypeScript +import Fireworks from 'fireworks-ai'; +import { KlavisClient, Klavis } from 'klavis'; + +// Initialize clients +const fireworksClient = new Fireworks({ apiKey: process.env.FIREWORKS_API_KEY }); +const klavisClient = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY }); +``` + + + +## AI Agent with MCP Integration + +Now we'll create an intelligent agent that can use MCP servers through Klavis API. This agent will: + +1. **Discover Tools**: Automatically find available tools from MCP servers +2. **Function Calling**: Use Fireworks AI's function calling capabilities +3. **Tool Execution**: Execute tools through Klavis API +4. **Smart Responses**: Generate intelligent responses based on tool results + + + +```python Python +class Agent: + def __init__(self, fireworks_client, klavis_client, mcp_server_url): + self.fireworks = fireworks_client + self.klavis = klavis_client + self.mcp_server_url = mcp_server_url + self.model = "accounts/fireworks/models/qwen2p5-72b-instruct" + print(f"šŸ¤– Agent initialized with model: {self.model}") + + def process_request(self, user_message): + # 1. Get available tools + mcp_tools = self.klavis.mcp_server.list_tools( + server_url=self.mcp_server_url, + format=ToolFormat.OPENAI + ) + + # 2. Call LLM with tools + messages = [ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": user_message} + ] + + response = self.fireworks.chat.completions.create( + model=self.model, + messages=messages, + tools=mcp_tools.tools + ) + + assistant_message = response.choices[0].message + messages.append(assistant_message) + + # 3. If LLM wants to use tools + if assistant_message.tool_calls: + + # Execute each tool call + for tool_call in assistant_message.tool_calls: + tool_name = tool_call.function.name + tool_args = json.loads(tool_call.function.arguments) + + print(f"šŸ› ļø Calling tool: {tool_name} with args: {tool_args}") + # Call tool via Klavis SDK + tool_result = self.klavis.mcp_server.call_tools( + server_url=self.mcp_server_url, + tool_name=tool_name, + tool_args=tool_args + ) + + messages.append({ + "role": "tool", + "tool_call_id": tool_call.id, + "content": str(tool_result) + }) + + # 4. Get final response from LLM + final_response = self.fireworks.chat.completions.create( + model=self.model, + messages=messages + ) + return final_response.choices[0].message.content + + # If no tools needed, return the assistant message directly + return assistant_message.content +``` + +```typescript TypeScript +class Agent { + private fireworks: Fireworks; + private klavis: KlavisClient; + private mcpServerUrl: string; + private model: string; + + constructor(fireworksClient: Fireworks, klavisClient: KlavisClient, mcpServerUrl: string) { + this.fireworks = fireworksClient; + this.klavis = klavisClient; + this.mcpServerUrl = mcpServerUrl; + this.model = "accounts/fireworks/models/qwen2p5-72b-instruct"; + console.log(`šŸ¤– Agent initialized with model: ${this.model}`); + } + + async processRequest(userMessage: string) { + // 1. Get available tools + const mcpTools = await this.klavis.mcpServer.listTools({ + serverUrl: this.mcpServerUrl, + format: Klavis.ToolFormat.Openai + }); + + // 2. Call LLM with tools + const messages = [ + { role: "system", content: "You are a helpful assistant." }, + { role: "user", content: userMessage } + ]; + + const response = await this.fireworks.chat.completions.create({ + model: this.model, + messages: messages, + tools: mcpTools.tools + }); + + const assistantMessage = response.choices[0].message; + messages.push(assistantMessage); + + // 3. If LLM wants to use tools + if (assistantMessage.tool_calls) { + // Execute each tool call + for (const toolCall of assistantMessage.tool_calls) { + const toolName = toolCall.function.name; + const toolArgs = JSON.parse(toolCall.function.arguments); + + console.log(`šŸ› ļø Calling tool: ${toolName} with args:`, toolArgs); + // Call tool via Klavis SDK + const toolResult = await this.klavis.mcpServer.callTools({ + serverUrl: this.mcpServerUrl, + toolName: toolName, + toolArgs: toolArgs + }); + + messages.push({ + role: "tool", + tool_call_id: toolCall.id, + content: JSON.stringify(toolResult) + }); + } + + // 4. Get final response from LLM + const finalResponse = await this.fireworks.chat.completions.create({ + model: this.model, + messages: messages + }); + return finalResponse.choices[0].message.content; + } + + // If no tools needed, return the assistant message directly + return assistantMessage.content; + } +} +``` + + + +## Use Case Examples + +### Example 1: Summarize YouTube Video + + + + Set up Fireworks AI and Klavis API clients + + + Create a YouTube MCP server instance + + + Use the agent to analyze and summarize a YouTube video + + + + + +```python Python +YOUTUBE_VIDEO_URL = "https://www.youtube.com/watch?v=kPXvf2-C_Hs" # Pick a video you like! + +# 1. Create YouTube MCP server instance +youtube_mcp_instance = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.YOUTUBE, + user_id="1234" +) + +# 2. Create an agent with YouTube MCP server +agent = Agent(fireworks_client, klavis_client, youtube_mcp_instance.server_url) + +# 3. Process the request +response = agent.process_request( + f"Summarize this YouTube video with timestamps: {YOUTUBE_VIDEO_URL}" +) + +print(response) +``` + +```typescript TypeScript +const YOUTUBE_VIDEO_URL = "https://www.youtube.com/watch?v=kPXvf2-C_Hs"; // Pick a video you like! + +// 1. Create YouTube MCP server instance +const youtubeMcpInstance = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Youtube, + userId: "1234" +}); + +// 2. Create an agent with YouTube MCP server +const agent = new Agent(fireworksClient, klavisClient, youtubeMcpInstance.serverUrl); + +// 3. Process the request +const response = await agent.processRequest( + `Summarize this YouTube video with timestamps: ${YOUTUBE_VIDEO_URL}` +); + +console.log(response); +``` + + + +### Example 2: Send Email via Gmail + + +Gmail integration requires OAuth authentication, so you'll need to authorize the application in your browser. + + + + + Create a Gmail MCP server instance + + + Complete OAuth flow for Gmail access + + + Use the agent to send an email + + + + + +```python Python +import webbrowser + +# Create Gmail MCP server instance +gmail_mcp_instance = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.GMAIL, + user_id="1234" +) + +# Redirect to Gmail OAuth page +webbrowser.open(gmail_mcp_instance.oauth_url) +print(f"šŸ” Opening OAuth authorization for Gmail, if you are not redirected, please open the following URL in your browser: {gmail_mcp_instance.oauth_url}") + +EMAIL_SUBJECT = "Hello, World!" +EMAIL_BODY = "This is a test email sent using Fireworks AI and Klavis integration." +EMAIL_RECIPIENT = "recipient@example.com" # Replace with your email + +# After OAuth authorization, create an agent with Gmail MCP server +agent = Agent(fireworks_client, klavis_client, gmail_mcp_instance.server_url) + +# Send the email +response = agent.process_request( + f"Send an email to {EMAIL_RECIPIENT} with subject {EMAIL_SUBJECT} and body {EMAIL_BODY}" +) + +print(response) +``` + +```typescript TypeScript +// Create Gmail MCP server instance +const gmailMcpInstance = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Gmail, + userId: "1234" +}); + +// Redirect to Gmail OAuth page +console.log("šŸ” Opening OAuth authorization for Gmail"); +console.log(`If you are not redirected, please open the following URL in your browser: ${gmailMcpInstance.oauthUrl}`); +// In a web environment, you might redirect the user +window.open(gmailMcpInstance.oauthUrl); + +const EMAIL_SUBJECT = "Hello, World!"; +const EMAIL_BODY = "This is a test email sent using Fireworks AI and Klavis integration."; +const EMAIL_RECIPIENT = "recipient@example.com"; // Replace with your email + +// After OAuth authorization, create an agent with Gmail MCP server +const agent = new Agent(fireworksClient, klavisClient, gmailMcpInstance.serverUrl); + +// Send the email +const response = await agent.processRequest( + `Send an email to ${EMAIL_RECIPIENT} with subject ${EMAIL_SUBJECT} and body ${EMAIL_BODY}` +); + +console.log(response); +``` + + + +## Next Steps + + + + Try other available servers like Slack, Notion, CRM, etc. + + + Experiment with various models like Llama, Mixtral, or Deepseek for different use cases + + + Create sophisticated agents that combine Gmail + Slack + Notion for complete business automation + + + Scale these patterns for production applications + + + +## Useful Resources + +- [Fireworks AI Documentation](https://docs.fireworks.ai/) +- [Klavis AI Documentation](https://www.klavis.ai/docs/) +- [MCP Protocol Specification](https://modelcontextprotocol.io/) +- [Klavis MCP Servers](/mcp-server) + +**Happy building with Fireworks AI and Klavis!** šŸš€ \ No newline at end of file diff --git a/docs/ai-platform-integration/gemini.mdx b/docs/ai-platform-integration/gemini.mdx new file mode 100644 index 00000000..e01f954d --- /dev/null +++ b/docs/ai-platform-integration/gemini.mdx @@ -0,0 +1,325 @@ +--- +title: "Gemini" +description: "Learn how to build AI agents that integrate Google's Gemini with Strata MCP servers to build AI agents that can interact with Gmail and Slack." +--- + +[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/klavis-ai/klavis/blob/main/examples/google-genai/Use_Klavis_with_Gemini.ipynb) + +# Gemini + Klavis AI Integration + +This tutorial demonstrates how to use Google's Gemini with function calling with Klavis MCP (Model Context Protocol) servers. + +## Prerequisites + +Before we begin, you'll need: + + + + Get your API key from Google AI Studio + + + Get your API key from Klavis AI + + + +## Installation + +First, install the required packages: + + + +```bash Python +pip install google-genai klavis +``` + +```bash TypeScript +npm install @google/genai klavis +``` + + + +## Full Code Examples + +For complete working examples, check out the source code: + + + + + + + + +## Setup Environment Variables + + + +```python Python +import os + +# Set environment variables +os.environ["GEMINI_API_KEY"] = "YOUR_GEMINI_API_KEY" # Replace with your actual Gemini API key +os.environ["KLAVIS_API_KEY"] = "YOUR_KLAVIS_API_KEY" # Replace with your actual Klavis API key +``` + +```typescript TypeScript +import { GoogleGenAI } from '@google/genai'; +import { KlavisClient, Klavis } from 'klavis'; + +// Set environment variables +process.env.GEMINI_API_KEY = "YOUR_GEMINI_API_KEY"; // Replace with your actual Gemini API key +process.env.KLAVIS_API_KEY = "YOUR_KLAVIS_API_KEY"; // Replace with your actual Klavis API key +``` + + + +### Step 1 - Create Strata MCP Server with Gmail and Slack + + + +```python Python +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat +import webbrowser + +klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + +response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GMAIL, McpServerName.SLACK], + user_id="1234" +) + +# Handle OAuth authorization for each services +if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + print(f"Or please open this URL to complete {server_name} OAuth authorization: {oauth_url}") +``` + +```typescript TypeScript +const klavisClient = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY }); + +const response = await klavisClient.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Gmail, Klavis.McpServerName.Slack], + userId: "1234" +}); + +// Handle OAuth authorization for each services +if (response.oauthUrls) { + for (const [serverName, oauthUrl] of Object.entries(response.oauthUrls)) { + window.open(oauthUrl); + // Wait for user to complete OAuth + await new Promise(resolve => { + const input = prompt(`Press OK after completing ${serverName} OAuth authorization...`); + resolve(input); + }); + } +} + +``` + + + + +**OAuth Authorization Required**: The code above will open browser windows for each service. Click through the OAuth flow to authorize access to your accounts. + + +### Step 2 - Create method to use MCP Server with Claude + +This method handles multiple rounds of tool calls until a final response is ready, allowing the AI to chain tool executions for complex tasks. + + + +```python Python +import json +from google import genai +from google.genai import types + +def gemini_with_mcp_server(mcp_server_url: str, user_query: str): + gemini_client = genai.Client(api_key=os.getenv("GEMINI_API_KEY")) + + contents = [] + contents.append(types.Content(role="user", parts=[types.Part(text=user_query)])) + + mcp_server_tools = klavis_client.mcp_server.list_tools( + server_url=mcp_server_url, + format=ToolFormat.GEMINI + ) + + max_iterations = 10 + iteration = 0 + + while iteration < max_iterations: + iteration += 1 + + response = gemini_client.models.generate_content( + model='gemini-2.5-flash', + contents=contents, + config=types.GenerateContentConfig(tools=mcp_server_tools.tools) + ) + + if response.candidates and response.candidates[0].content.parts: + contents.append(response.candidates[0].content) + + # Check if there are function calls to execute + has_function_calls = False + for part in response.candidates[0].content.parts: + if hasattr(part, 'function_call') and part.function_call: + has_function_calls = True + function_name = part.function_call.name + function_args = dict(part.function_call.args) + + print(f"Calling: {function_name}, with args: {function_args}") + + result = klavis_client.mcp_server.call_tools( + server_url=mcp_server_url, + tool_name=function_name, + tool_args=function_args + ) + + function_response_part = types.Part.from_function_response( + name=function_name, + response={'result': result.result} + ) + function_response_content = types.Content( + role='tool', + parts=[function_response_part] + ) + contents.append(function_response_content) + + if has_function_calls: + continue + else: + return response.text + else: + return "No response generated." + + return "Max iterations reached without final response" +``` + +```typescript TypeScript +async function geminiWithMcpServer(mcpServerUrl: string, userQuery: string) { + const geminiClient = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY }); + + const contents = []; + contents.push({ role: "user", parts: [{ text: userQuery }] }); + + const mcpServerTools = await klavisClient.mcpServer.listTools({ + serverUrl: mcpServerUrl, + format: Klavis.ToolFormat.Gemini + }); + + const maxIterations = 10; + let iteration = 0; + + while (iteration < maxIterations) { + iteration++; + + const response = await geminiClient.models.generateContent({ + model: 'gemini-2.5-flash', + contents: contents, + config: { tools: mcpServerTools.tools } + }); + + if (response.candidates && response.candidates[0].content.parts) { + contents.push(response.candidates[0].content); + + // Check if there are function calls to execute + let hasFunctionCalls = false; + for (const part of response.candidates[0].content.parts) { + if (part.functionCall) { + hasFunctionCalls = true; + const functionName = part.functionCall.name; + const functionArgs = part.functionCall.args; + + console.log(`šŸ”§ Calling: ${functionName}, with args:`, functionArgs); + + const result = await klavisClient.mcpServer.callTools({ + serverUrl: mcpServerUrl, + toolName: functionName, + toolArgs: functionArgs + }); + + const functionResponsePart = { + functionResponse: { + name: functionName, + response: { result: result.result } + } + }; + const functionResponseContent = { + role: 'tool', + parts: [functionResponsePart] + }; + contents.push(functionResponseContent); + } + } + + if (hasFunctionCalls) { + continue; + } else { + return response.text; + } + } else { + return "No response generated."; + } + } + + return "Max iterations reached without final response"; +} +``` + + + +### Step 3 - Run! + + + +```python Python +result = gemini_with_mcp_server( + mcp_server_url=response.strata_server_url, + user_query="Check my latest 5 gmails and summarize them in a Slack message to #engineering" +) + +print(f"\nšŸ¤– Final Response: {result}") +``` + +```typescript TypeScript +const result = await geminiWithMcpServer( + response.strataServerUrl, + "Check my latest 5 gmails and summarize them in a Slack message to #engineering" +); + +console.log(`\nšŸ¤– Final Response: ${result}`); +``` + + + + +Perfect! You've integrated Gemini with Klavis MCP servers. + + +## Next Steps + + + + Try other available servers like Slack, Notion, GitHub, etc. + + + Build workflows that combine text, images, and other media + + + Scale these patterns for production applications + + + Build custom MCP servers for your specific needs + + + +## Useful Resources + +- [Google AI Documentation](https://ai.google.dev/) +- [Gemini API Reference](https://ai.google.dev/api) +- [Klavis AI Documentation](https://www.klavis.ai/docs/) +- [MCP Protocol Specification](https://modelcontextprotocol.io/) +- [Klavis MCP Servers](/mcp-server) + +**Happy building!** šŸš€ diff --git a/docs/ai-platform-integration/google-adk.mdx b/docs/ai-platform-integration/google-adk.mdx new file mode 100644 index 00000000..d4e3edce --- /dev/null +++ b/docs/ai-platform-integration/google-adk.mdx @@ -0,0 +1,153 @@ +--- +title: "Google ADK" +description: "This tutorial demonstrates how to integrate Google Agent Development Kit (ADK) with Klavis MCP servers to build AI agents that can interact with Gmail and Slack." +--- + + + Google ADK and Klavis Integration - Build AI agents with MCP tools + + + +You can find the complete example code in Klavis GitHub repository **[here ->](https://github.com/Klavis-AI/klavis/tree/main/examples/google_adk/python)** + + +## Prerequisites + +Before we begin, you'll need: [Google API key](https://console.cloud.google.com/apis/credentials) and [Klavis API key](https://www.klavis.ai/home/api-keys). + +## Installation + +First, install the required packages: + +```bash +pip install google-adk klavis +``` + +## Setup Environment Variables + +```python +import os +from dotenv import load_dotenv + +load_dotenv() + +os.environ["KLAVIS_API_KEY"] = "YOUR_KLAVIS_API_KEY" # Replace +``` + +### Step 1 - Create an Agent Project + +Run the ADK create command to start a new agent project: + +```bash +adk create my_agent +``` + +This will create the following project structure: + +``` +my_agent/ + agent.py # main agent code + .env # API keys or project IDs + __init__.py +``` + +### Step 2 - Configure Agent with Klavis MCP + +The `agent.py` file contains a `root_agent` definition which is the only required element of an ADK agent. + +Update your `agent.py` to integrate Klavis MCP servers: + +```python +import os +import webbrowser + +from google.adk.agents.llm_agent import Agent +from google.adk.tools.mcp_tool import StreamableHTTPConnectionParams +from google.adk.tools.mcp_tool.mcp_toolset import McpToolset +from klavis import Klavis +from klavis.types import McpServerName + +from dotenv import load_dotenv +load_dotenv() + +KLAVIS_API_KEY = os.getenv("KLAVIS_API_KEY") + +# Initialize Klavis and set up Strata server +klavis_client = Klavis(api_key=KLAVIS_API_KEY) + +user_id = "user_123" + +# Create Strata server with multiple MCP servers +strata_response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GMAIL, McpServerName.SLACK], + user_id=user_id +) + +# Handle OAuth authentication +if strata_response.oauth_urls: + for server_name, oauth_url in strata_response.oauth_urls.items(): + user_integration_auth = klavis_client.user.get_user_auth( + user_id=user_id, + server_name=server_name + ) + if not user_integration_auth.is_authenticated: + print(f"šŸ” Opening OAuth for {server_name}...") + webbrowser.open(oauth_url) + input(f"Press Enter after completing {server_name} OAuth authorization...") + +mcp_server_url = strata_response.strata_server_url + +# Create AI agent with MCP toolset (exposed at module level for ADK) +root_agent = Agent( + name="my_agent", + model="gemini-2.5-flash", + description="An agent with access to tools through Klavis MCP", + instruction="You are a helpful assistant with access to MCP tools.", + tools=[ + McpToolset( + connection_params=StreamableHTTPConnectionParams( + url=mcp_server_url, + ), + ) + ], +) +``` + + +**OAuth Authorization Required**: The code above will open browser windows for each service. Click through the OAuth flow to authorize access to your accounts. + + +### Step 3 - Run Your Agent + +Launch the web interface to interact with your agent: + +```bash +adk web +``` + +This will start a local web server where you can chat with your agent and watch it use the Gmail and Slack MCP tools. + + +Perfect! You've integrated Google ADK with Klavis MCP servers. + + +## Next Steps + + + + Explore available MCP servers + + + REST endpoints and schemas + + + + +## Useful Resources + +- [Google ADK Documentation](https://google.github.io/adk/) +- [Google ADK GitHub](https://github.com/google/adk) +- [MCP Protocol Specification](https://modelcontextprotocol.io/) + +**Happy building!** šŸš€ + diff --git a/docs/ai-platform-integration/langchain.mdx b/docs/ai-platform-integration/langchain.mdx new file mode 100644 index 00000000..5b265c3b --- /dev/null +++ b/docs/ai-platform-integration/langchain.mdx @@ -0,0 +1,213 @@ +--- +title: "LangChain" +description: "This tutorial demonstrates how to integrate LangChain's agent framework with Strata MCP servers to build AI agents that can interact with Gmail and Slack." +--- +## Prerequisites + +Before we begin, you'll need [OpenAI API key]((https://platform.openai.com/api-keys)) and [Klavis API key](https://www.klavis.ai/home/api-keys). + +## Installation + +First, install the required packages: + + + +```bash Python +pip install langchain-mcp-adapters langgraph langchain-openai klavis +``` + +```bash TypeScript +npm install @langchain/mcp-adapters @langchain/langgraph @langchain/openai klavis +``` + + + +## Setup Environment Variables + + + +```python Python +import os + +os.environ["OPENAI_API_KEY"] = "YOUR_OPENAI_API_KEY" # Replace +os.environ["KLAVIS_API_KEY"] = "YOUR_KLAVIS_API_KEY" # Replace +``` + +```typescript TypeScript +// Set environment variables +process.env.OPENAI_API_KEY = "YOUR_OPENAI_API_KEY"; // Replace with your actual OpenAI API key +process.env.KLAVIS_API_KEY = "YOUR_KLAVIS_API_KEY"; // Replace with your actual Klavis API key +``` + + + + +### Step 1 - Create Strata MCP Server with Gmail and Slack + + + +```python Python +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat +import webbrowser + +klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + +response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GMAIL, McpServerName.SLACK], + user_id="1234" +) + +# Handle OAuth authorization for each services +if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + print(f"Or please open this URL to complete {server_name} OAuth authorization: {oauth_url}") +``` + +```typescript TypeScript +const klavisClient = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY }); + +const response = await klavisClient.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Gmail, Klavis.McpServerName.Slack], + userId: "1234" +}); + +// Handle OAuth authorization for each services +if (response.oauthUrls) { + for (const [serverName, oauthUrl] of Object.entries(response.oauthUrls)) { + window.open(oauthUrl); + // Wait for user to complete OAuth + await new Promise(resolve => { + const input = prompt(`Press OK after completing ${serverName} OAuth authorization...`); + resolve(input); + }); + } +} +``` + + + + +*OAuth Authorization Required*: The code above will open browser windows for each service. Click through the OAuth flow to authorize access to your accounts. + + +### Step 2 - Create LangChain Agent with Strata MCP Server + + + +```python Python +import asyncio +from langchain_mcp_adapters.client import MultiServerMCPClient +from langgraph.prebuilt import create_react_agent +from langchain_openai import ChatOpenAI + +# Initialize LLM +llm = ChatOpenAI(model="gpt-4o-mini", api_key=os.getenv("OPENAI_API_KEY")) + +# Create MCP client with Strata server +mcp_client = MultiServerMCPClient({ + "strata": { + "transport": "streamable_http", + "url": response.strata_server_url + } +}) + +# Get tools from Strata MCP server +tools = asyncio.run(mcp_client.get_tools()) + +# Create agent with MCP-based tools +agent = create_react_agent( + model=llm, + tools=tools, + prompt="You are a helpful assistant that uses MCP tools to interact with Gmail and Slack." +) + +print("šŸ¤– LangChain agent created successfully!") +``` + +```typescript TypeScript +// Initialize LLM +const llm = new ChatOpenAI({ + modelName: "gpt-4o-mini", + apiKey: process.env.OPENAI_API_KEY +}); + +// Create MCP client with Strata server +const mcpClient = new MultiServerMCPClient({ + throwOnLoadError: true, + useStandardContentBlocks: true, + mcpServers: { + strata: { + url: response.strataServerUrl, + transport: "streamable_http" + } + } +}); + +// Get tools from Strata MCP server +const tools = await mcpClient.getTools(); + +// Create agent with MCP-based tools +const agent = createReactAgent({ + llm: llm, + tools: tools, + systemMessage: "You are a helpful assistant that uses MCP tools to interact with Gmail and Slack." +}); + +console.log("šŸ¤– LangChain agent created successfully!"); +``` + + + +### Step 3 - Run! + + + +```python Python +response_message = asyncio.run(agent.ainvoke({ + "messages": [{"role": "user", "content": "Check my latest 5 emails and summarize them in a Slack message to #general"}] +})) + +print(f"\nšŸ¤– Final Response: {response_message['messages'][-1].content}") +``` + +```typescript TypeScript +try { + const response = await agent.invoke({ + messages: [{ role: "user", content: "Check my latest emails and summarize them in a Slack message to #updates" }] + }); + console.log(`\nšŸ¤– Final Response:`, response); +} catch (error) { + console.error("Error during agent execution:", error); +} finally { + await mcpClient.close(); +} +``` + + + + +Perfect! You've integrated LangChain with Klavis MCP servers. + + +## Next Steps + + + + Explore available MCP servers + + + REST endpoints and schemas + + + + +## Useful Resources + +- [LangChain Documentation](https://python.langchain.com/docs/) +- [LangGraph Documentation](https://langchain-ai.github.io/langgraph/) +- [LangChain MCP Adapters](https://pypi.org/project/langchain-mcp-adapters/) +- [MCP Protocol Specification](https://modelcontextprotocol.io/) + +*Happy building* šŸš€ \ No newline at end of file diff --git a/docs/ai-platform-integration/llamaindex.mdx b/docs/ai-platform-integration/llamaindex.mdx new file mode 100644 index 00000000..61a579cd --- /dev/null +++ b/docs/ai-platform-integration/llamaindex.mdx @@ -0,0 +1,263 @@ +--- +title: "LlamaIndex" +description: "Learn how to build AI agents that integrate with Strata MCP servers to build AI agents that can interact with Gmail and Slack." +--- + +## Partnership + +LlamaIndex has officially showcased their integration with Klavis AI in [this LinkedIn post](https://www.linkedin.com/posts/llamaindex_build-ai-agents-that-connect-to-youtube-activity-7344107221221355521-UrOl?utm_source=share&utm_medium=member_desktop&rcm=ACoAACh0ewEBh9MR1nb_U_x3e5bqgDYgETJ8d5Y), demonstrating how to build AI agents that connect to MCP Servers in just a few lines of code. + + + LlamaIndex and Klavis Integration - Build AI agents that connect to MCP Servers + + +## Prerequisites + +Before we begin, you'll need: + + + + Get your API key from OpenAI Platform (LlamaIndex uses OpenAI as the default LLM) + + + Get your API key from Klavis AI + + + +## Installation + +First, install the required packages: + + + +```bash Python +pip install llama-index llama-index-tools-mcp klavis +``` + +```bash TypeScript +npm install @llamaindex/tools @llamaindex/workflow @llamaindex/openai klavis +``` + + + +## Setup Environment Variables + + + +```python Python +import os + +# Set environment variables +os.environ["OPENAI_API_KEY"] = "your-openai-api-key-here" # Replace with your actual OpenAI API key +os.environ["KLAVIS_API_KEY"] = "your-klavis-api-key-here" # Replace with your actual Klavis API key +``` + +```typescript TypeScript +// Set environment variables in your .env file +process.env.OPENAI_API_KEY = "your-openai-api-key-here"; // Replace with your actual OpenAI API key +process.env.KLAVIS_API_KEY = "your-klavis-api-key-here"; // Replace with your actual Klavis API key +``` + + + +## Basic Setup + + + +```python Python +from klavis import Klavis +from klavis.types import McpServerName +from llama_index.llms.openai import OpenAI +from llama_index.tools.mcp import ( + BasicMCPClient, + get_tools_from_mcp_url, + aget_tools_from_mcp_url +) + +# Initialize clients +klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) +llm = OpenAI(model="gpt-4o-mini", api_key=os.getenv("OPENAI_API_KEY")) +``` + +```typescript TypeScript +import { KlavisClient, Klavis } from 'klavis'; +import { mcp } from "@llamaindex/tools"; +import { agent, multiAgent } from "@llamaindex/workflow"; +import { openai } from "@llamaindex/openai"; + +// Initialize clients +const klavisClient = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY }); +``` + + + +### Step 1 - Create Strata MCP Server with Gmail and Slack + + + +```python Python +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat +import webbrowser + +klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + +response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GMAIL, McpServerName.SLACK], + user_id="1234" +) + +# Handle OAuth authorization for each services +if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + print(f"Or please open this URL to complete {server_name} OAuth authorization: {oauth_url}") +``` + +```typescript TypeScript +const klavisClient = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY }); + +const response = await klavisClient.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Gmail, Klavis.McpServerName.Slack], + userId: "1234" +}); + +// Handle OAuth authorization for each services +if (response.oauthUrls) { + for (const [serverName, oauthUrl] of Object.entries(response.oauthUrls)) { + window.open(oauthUrl); + // Wait for user to complete OAuth + await new Promise(resolve => { + const input = prompt(`Press OK after completing ${serverName} OAuth authorization...`); + resolve(input); + }); + } +} + +``` + + + + +**OAuth Authorization Required**: The code above will open browser windows for each service. Click through the OAuth flow to authorize access to your accounts. + + +### Step 2 - Create method to use MCP Server with LlamaIndex + +This method handles multiple rounds of tool calls until a final response is ready, allowing the AI to chain tool executions for complex tasks. + + + +```python Python +import json +from llama_index.core.agent.workflow import FunctionAgent, AgentWorkflow + +async def llamaindex_with_mcp_server(mcp_server_url: str, user_query: str): + llm = OpenAI(model="gpt-4o-mini", api_key=os.getenv("OPENAI_API_KEY")) + + all_tools = await aget_tools_from_mcp_url( + mcp_server_url, + client=BasicMCPClient(mcp_server_url) + ) + + communication_agent = FunctionAgent( + name="communication_agent", + description="Agent that can read emails from Gmail and send messages to Slack", + tools=all_tools, + llm=llm, + system_prompt="You are a helpful assistant. Use the available tools to answer the user's question.", + max_iterations=10 + ) + + workflow = AgentWorkflow( + agents=[communication_agent], + root_agent="communication_agent" + ) + + resp = await workflow.run(user_msg=user_query) + + return resp.response.content +``` + +```typescript TypeScript +async function llamaindexWithMcpServer(mcpServerUrl: string, userQuery: string) { + const llm = new openai.OpenAI({ + model: "gpt-4o-mini", + apiKey: process.env.OPENAI_API_KEY + }); + + const allTools = await mcp.getToolsFromMcpUrl( + mcpServerUrl, + new mcp.BasicMCPClient(mcpServerUrl) + ); + + const communicationAgent = new agent.FunctionAgent({ + name: "communication_agent", + description: "Agent that can read emails from Gmail and send messages to Slack", + tools: allTools, + llm: llm, + systemPrompt: "You are a helpful assistant. Use the available tools to answer the user's question.", + maxIterations: 10 + }); + + const workflow = new agent.AgentWorkflow({ + agents: [communicationAgent], + rootAgent: "communication_agent" + }); + + const resp = await workflow.run({ userMsg: userQuery }); + + return resp.response.content; +} +``` + + + + +### Step 3 - Run! + + + +```python Python +result = await llamaindex_with_mcp_server( + mcp_server_url=response.strata_server_url, + user_query="Check my latest 5 emails and summarize them in a Slack message to #general" +) + +print(f"\nFinal Response: {result}") +``` + +```typescript TypeScript +result = await llamaindexWithMcpServer( + response.strataServerUrl, + "Check my latest 5 emails and summarize them in a Slack message to #general" +); + +console.log(`\nFinal Response: ${result}`); +``` + + + + +Perfect! You've integrated LLamaIndex with Strata MCP servers. + + +## Next Steps + + + + Explore available MCP servers + + + REST endpoints and schemas + + + +## Useful Resources + +- [LlamaIndex Documentation](https://docs.llamaindex.ai/) +- [Klavis AI Documentation](https://www.klavis.ai/docs/) +- [MCP Protocol Specification](https://modelcontextprotocol.io/) +- [Klavis MCP Servers](/mcp-server) + +**Happy building with LlamaIndex and Klavis!** šŸš€ \ No newline at end of file diff --git a/docs/ai-platform-integration/mastra.mdx b/docs/ai-platform-integration/mastra.mdx new file mode 100644 index 00000000..e467437c --- /dev/null +++ b/docs/ai-platform-integration/mastra.mdx @@ -0,0 +1,127 @@ +--- +title: "Mastra" +description: "Learn how to build AI agents that integrate Mastra framework with Klavis MCP Servers for enhanced functionality" +--- + +## Partnership + +Mastra has officially featured Klavis AI in [their MCP registry documentation](https://mastra.ai/en/docs/tools-mcp/mcp-overview#connecting-to-an-mcp-registry), showcasing how to connect to MCP servers for building powerful AI agents. + + + Mastra and Klavis Integration - Connect to MCP servers through registry + + +## Prerequisites + +Before we begin, you'll need [OpenAI API key](https://platform.openai.com/api-keys) and [Klavis API key](https://www.klavis.ai/home/api-keys). + + + +You can find the complete example code in Klavis GitHub repository: **[šŸ“ Checkout here](https://github.com/Klavis-AI/klavis/tree/main/examples/mastra-klavis)** + + +## Setup Environment Variables + +Create a `.env` file in your project root: + +```env +OPENAI_API_KEY=your_openai_api_key_here +KLAVIS_API_KEY=your_klavis_api_key_here +``` + +## Project Structure + +``` +mastra-klavis-example/ +ā”œā”€ā”€ src/ +│ └── mastra/ +│ └── index.ts +ā”œā”€ā”€ package.json +└── tsconfig.json +``` + +## Code Example + +```typescript +import { Mastra } from '@mastra/core/mastra'; +import { Agent } from '@mastra/core/agent'; +import { openai } from '@ai-sdk/openai'; +import { MCPClient } from '@mastra/mcp'; +import { KlavisClient, Klavis } from 'klavis'; +import open from 'open'; + +// Creates an MCP Agent with tools from Klavis Strata server +export const createMcpAgent = async (userId: string = 'test-user'): Promise => { + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP Server with Gmail and Slack + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Gmail, Klavis.McpServerName.Slack], + userId + }); + + // Handle OAuth authorization for each service + if (response.oauthUrls) { + for (const [serverName, oauthUrl] of Object.entries(response.oauthUrls)) { + await open(oauthUrl); + console.log(`Please complete ${serverName} OAuth authorization at: ${oauthUrl}`); + } + } + + // Initialize the MCP client with Strata server URL + const mcpClient = new MCPClient({ + servers: { + strata: { + url: new URL(response.strataServerUrl) + } + } + }); + + // Create agent + return new Agent({ + name: 'MCP Agent', + instructions: `You are an AI agent with access to MCP tools.`, + model: openai('gpt-4o-mini'), + tools: await mcpClient.getTools() + }); +}; + +const agent = await createMcpAgent(); + +export const mastra = new Mastra({ + agents: { agent } +}); +``` + +## Running the Agent + +```bash +npm install +npm run dev +``` + +## Video Tutorial + + + +## Summary + +This implementation demonstrates how to integrate Mastra with Klavis Strata servers to access multiple MCP services (Gmail and Slack) through a single unified server. The agent is configured with MCP tools and can interact with various services through the MCP protocol. + +## Useful Resources + +- [Mastra Doc](https://mastra.ai/docs) +- [Mastra GitHub Repo](https://github.com/mastra-ai/mastra) +- [MCP Spec](https://modelcontextprotocol.io/) + +**Happy building with Mastra and Klavis!** šŸš€ \ No newline at end of file diff --git a/docs/ai-platform-integration/openai.mdx b/docs/ai-platform-integration/openai.mdx new file mode 100644 index 00000000..e8f92a73 --- /dev/null +++ b/docs/ai-platform-integration/openai.mdx @@ -0,0 +1,301 @@ +--- +title: "OpenAI" +description: "This tutorial demonstrates how to integrate OpenAI's function calling capabilities with Strata MCP servers to build AI agents that can interact with Gmail and Slack." +--- +## Prerequisites + +Before we begin, you'll need [OpenAI API key]((https://platform.openai.com/api-keys)) and [Klavis API key](https://www.klavis.ai/home/api-keys). + +## Installation + +First, install the required packages: + + + +```bash Python +pip install openai klavis +``` + +```bash TypeScript +npm install openai klavis +``` + + + +## Setup Environment Variables + + + +```python Python +import os + +os.environ["OPENAI_API_KEY"] = "YOUR_OPENAI_API_KEY" # Replace +os.environ["KLAVIS_API_KEY"] = "YOUR_KLAVIS_API_KEY" # Replace +``` + +```typescript TypeScript +import OpenAI from 'openai'; +import { KlavisClient, Klavis } from 'klavis'; + +// Set environment variables +process.env.OPENAI_API_KEY = "YOUR_OPENAI_API_KEY"; // Replace with your actual OpenAI API key +process.env.KLAVIS_API_KEY = "YOUR_KLAVIS_API_KEY"; // Replace with your actual Klavis API key +``` + + + + +### Step 1 - Create Strata MCP Server with Gmail and Slack + + + +```python Python +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat +import webbrowser + +klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + +response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GMAIL, McpServerName.SLACK], + user_id="1234" +) + +# Handle OAuth authorization for each services +if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + print(f"Or please open this URL to complete {server_name} OAuth authorization: {oauth_url}") +``` + +```typescript TypeScript +const klavisClient = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY }); + +const response = await klavisClient.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Gmail, Klavis.McpServerName.Slack], + userId: "1234" +}); + +// Handle OAuth authorization for each services +if (response.oauthUrls) { + for (const [serverName, oauthUrl] of Object.entries(response.oauthUrls)) { + window.open(oauthUrl); + // Wait for user to complete OAuth + await new Promise(resolve => { + const input = prompt(`Press OK after completing ${serverName} OAuth authorization...`); + resolve(input); + }); + } +} + +``` + + + + +**OAuth Authorization Required**: The code above will open browser windows for each service. Click through the OAuth flow to authorize access to your accounts. + + +### Step 2 - Create method to use MCP Server with OpenAI + +This method handles multiple rounds of tool calls until a final response is ready, allowing the AI to chain tool executions for complex tasks. + + + +```python Python +import json +from openai import OpenAI + +def openai_with_mcp_server(mcp_server_url: str, user_query: str): + openai_client = OpenAI(api_key=os.getenv("OPENAI_API_KEY")) + + messages = [ + {"role": "system", "content": "You are a helpful assistant. Use the available tools to answer the user's question."}, + {"role": "user", "content": f"{user_query}"} + ] + + tools_info = klavis_client.mcp_server.list_tools( + server_url=mcp_server_url, + format=ToolFormat.OPENAI + ) + + max_iterations = 10 + iteration = 0 + + while iteration < max_iterations: + iteration += 1 + + response = openai_client.chat.completions.create( + model="gpt-4o-mini", + messages=messages, + tools=tools_info.tools, + tool_choice="auto", + ) + + assistant_message = response.choices[0].message + + if assistant_message.tool_calls: + messages.append({ + "role": "assistant", + "content": assistant_message.content, + "tool_calls": [ + { + "id": tc.id, + "type": "function", + "function": { + "name": tc.function.name, + "arguments": tc.function.arguments + } + } + for tc in assistant_message.tool_calls + ] + }) + + for tool_call in assistant_message.tool_calls: + tool_name = tool_call.function.name + tool_args = json.loads(tool_call.function.arguments) + + print(f"Calling: {tool_name}") + print(f"Arguments: {json.dumps(tool_args, indent=2)}") + + function_result = klavis_client.mcp_server.call_tools( + server_url=mcp_server_url, + tool_name=tool_name, + tool_args=tool_args + ) + + messages.append({ + "role": "tool", + "tool_call_id": tool_call.id, + "content": str(function_result) + }) + continue + else: + messages.append({"role": "assistant", "content": assistant_message.content}) + return assistant_message.content + + return "Max iterations reached without final response" +``` + +```typescript TypeScript +async function openaiWithMcpServer(mcpServerUrl: string, userQuery: string) { + const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY }); + + const messages = [ + { role: "system", content: "You are a helpful assistant. Use the available tools to answer the user's question." }, + { role: "user", content: userQuery } + ]; + + const toolsInfo = await klavisClient.mcpServer.listTools({ + serverUrl: mcpServerUrl, + format: Klavis.ToolFormat.Openai + }); + + const maxIterations = 10; + let iteration = 0; + + while (iteration < maxIterations) { + iteration++; + + const response = await openaiClient.chat.completions.create({ + model: "gpt-4o-mini", + messages: messages, + tools: toolsInfo.tools, + tool_choice: "auto" + }); + + const assistantMessage = response.choices[0].message; + + if (assistantMessage.tool_calls) { + messages.push({ + role: "assistant", + content: assistantMessage.content, + tool_calls: assistantMessage.tool_calls.map(tc => ({ + id: tc.id, + type: "function", + function: { + name: tc.function.name, + arguments: tc.function.arguments + } + })) + }); + + for (const toolCall of assistantMessage.tool_calls) { + const toolName = toolCall.function.name; + const toolArgs = JSON.parse(toolCall.function.arguments); + + console.log(`šŸ”§ Calling: ${toolName}`); + console.log(` Arguments:`, JSON.stringify(toolArgs, null, 2)); + + const functionResult = await klavisClient.mcpServer.callTools({ + serverUrl: mcpServerUrl, + toolName: toolName, + toolArgs: toolArgs + }); + + messages.push({ + role: "tool", + tool_call_id: toolCall.id, + content: JSON.stringify(functionResult) + }); + } + continue; + } else { + messages.push({ role: "assistant", content: assistantMessage.content }); + return assistantMessage.content; + } + } + + return "Max iterations reached without final response"; +} +``` + + + +### Step 3 - Run! + + + +```python Python +result = openai_with_mcp_server( + mcp_server_url=response.strata_server_url, + user_query="Check my latest 5 emails and summarize them in a Slack message to #general" +) + +print(f"\nšŸ¤– Final Response: {result}") +``` + +```typescript TypeScript +result = await openaiWithMcpServer( + response.strataServerUrl, + "Check my latest emails and summarize them in a Slack message to #updates" +); + +console.log(`\nšŸ¤– Final Response: ${result}`); +``` + + + + +Perfect! You've integrated OpenAI with Klavis MCP servers. + + +## Next Steps + + + + Explore available MCP servers + + + REST endpoints and schemas + + + + +## Useful Resources + +- [OpenAI API Documentation](https://platform.openai.com/docs) +- [OpenAI Function Calling Guide](https://platform.openai.com/docs/guides/function-calling) +- [MCP Protocol Specification](https://modelcontextprotocol.io/) + +**Happy building!** šŸš€ \ No newline at end of file diff --git a/docs/ai-platform-integration/overview.mdx b/docs/ai-platform-integration/overview.mdx new file mode 100644 index 00000000..fef23612 --- /dev/null +++ b/docs/ai-platform-integration/overview.mdx @@ -0,0 +1,86 @@ +--- +title: "Overview" +description: "Integrate Klavis MCP Servers with leading AI platforms to build powerful AI agents" +--- + +import OpenaiCard from '/snippets/ai-platform-card/openai-card.mdx'; +import ClaudeCard from '/snippets/ai-platform-card/claude-card.mdx'; +import GeminiCard from '/snippets/ai-platform-card/gemini-card.mdx'; +import GoogleAdkCard from '/snippets/ai-platform-card/google-adk-card.mdx'; +import LlamaindexCard from '/snippets/ai-platform-card/llamaindex-card.mdx'; +import TogetherAiCard from '/snippets/ai-platform-card/together-ai-card.mdx'; +import FireworksAiCard from '/snippets/ai-platform-card/fireworks-ai-card.mdx'; +import CrewAiCard from '/snippets/ai-platform-card/crewai-card.mdx'; +import LangChainCard from '/snippets/ai-platform-card/langchain-card.mdx'; +import MastraCard from '/snippets/ai-platform-card/mastra-card.mdx'; + +# AI Platform Integrations + +Klavis AI seamlessly integrates with leading AI platforms, enabling you to build sophisticated AI agents that can interact with external services and APIs via MCP. + +## Available Integrations + + + + + + + + + + + + + +## How It Works + +The integration pattern is consistent across all AI platforms: + +1. **Setup**: Configure your AI platform API key and Klavis API key +2. **MCP Instance**: Create MCP server instances for the services you need (YouTube, Gmail, Slack, etc.) +3. **Agent Creation**: Build an AI agent that discovers available tools from MCP servers +4. **Function Calling**: The AI platform's LLM decides which tools to use based on user requests +5. **Tool Execution**: Tools are executed through Klavis API to remote MCP Servers and results are returned to the LLM +6. **Smart Response**: The LLM generates intelligent responses based on tool results + +## Common Use Cases + + + + Summarize YouTube videos, analyze documents, extract insights from web content + + + Send emails, post to Slack, create calendar events, manage contacts + + + Query databases, update spreadsheets, sync information across platforms + + + Create tickets in Jira, update Notion pages, manage Asana tasks + + + Web search, document analysis, code generation, API integrations + + + CRM updates, sales workflows, customer support, reporting + + + +## Getting Started + + + + Select the AI platform that best fits your needs from our available integrations + + + Obtain API keys from both your chosen AI platform and Klavis AI + + + Use our step-by-step integration guides to build your first AI agent + + + Add more MCP servers to give your agent access to additional tools and services + + + +Ready to build intelligent AI agents? Choose your preferred AI platform and start building! \ No newline at end of file diff --git a/docs/ai-platform-integration/together-ai.mdx b/docs/ai-platform-integration/together-ai.mdx new file mode 100644 index 00000000..8ec92880 --- /dev/null +++ b/docs/ai-platform-integration/together-ai.mdx @@ -0,0 +1,409 @@ +--- +title: "Together AI" +description: "Learn how to build AI agents that integrate Together AI's powerful LLMs with Klavis MCP Servers" +--- + +## Prerequisites + +Before we begin, you'll need: + + + + Get your API key from Together AI + + + Get your API key from Klavis AI + + + +## Installation + +First, install the required packages: + + + +```bash Python +pip install together klavis +``` + +```bash TypeScript +npm install together-ai klavis +``` + + + +## Setup Environment Variables + + + +```python Python +import os + +# Set environment variables +os.environ["TOGETHER_API_KEY"] = "your-together-api-key-here" # Replace with your actual Together API key +os.environ["KLAVIS_API_KEY"] = "your-klavis-api-key-here" # Replace with your actual Klavis API key +``` + +```typescript TypeScript +// Set environment variables in your .env file +process.env.TOGETHER_API_KEY = "your-together-api-key-here"; // Replace with your actual Together API key +process.env.KLAVIS_API_KEY = "your-klavis-api-key-here"; // Replace with your actual Klavis API key +``` + + + +## Basic Setup + + + +```python Python +import os +import json +from together import Together +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat + +# Initialize clients +together_client = Together(api_key=os.getenv("TOGETHER_API_KEY")) +klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) +``` + +```typescript TypeScript +import Together from 'together-ai'; +import { KlavisClient, Klavis } from 'klavis'; + +// Initialize clients +const togetherClient = new Together({ apiKey: process.env.TOGETHER_API_KEY }); +const klavisClient = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY }); +``` + + + +## AI Agent with MCP Integration + +Now we'll create an intelligent agent that uses Together AI's powerful LLMs with Klavis MCP servers. This agent will: + +1. **Discover Tools**: Automatically find available tools from MCP servers +2. **Function Calling**: Use Together AI's function calling capabilities +3. **Tool Execution**: Execute tools through Klavis API +4. **Smart Responses**: Generate intelligent responses based on tool results + + + +```python Python +class Agent: + def __init__(self, together_client, klavis_client, mcp_server_url, model="meta-llama/Llama-3.3-70B-Instruct-Turbo"): + self.together = together_client + self.klavis = klavis_client + self.mcp_server_url = mcp_server_url + self.model = model + print(f"šŸ¤– Agent initialized with Together AI model: {self.model}") + + def process_request(self, user_message): + # 1. Get available tools + mcp_tools = self.klavis.mcp_server.list_tools( + server_url=self.mcp_server_url, + + format=ToolFormat.OPENAI + ) + + # 2. Call LLM with tools + messages = [ + {"role": "system", "content": "You are a helpful AI assistant with access to various tools."}, + {"role": "user", "content": user_message} + ] + + response = self.together.chat.completions.create( + model=self.model, + messages=messages, + tools=mcp_tools.tools + ) + + assistant_message = response.choices[0].message + messages.append(assistant_message) + + # 3. If LLM wants to use tools + if assistant_message.tool_calls: + + # Execute each tool call + for tool_call in assistant_message.tool_calls: + tool_name = tool_call.function.name + tool_args = json.loads(tool_call.function.arguments) + + print(f"šŸ› ļø Calling tool: {tool_name} with args: {tool_args}") + # Call tool via Klavis SDK + tool_result = self.klavis.mcp_server.call_tools( + server_url=self.mcp_server_url, + tool_name=tool_name, + tool_args=tool_args + ) + + messages.append({ + "role": "tool", + "tool_call_id": tool_call.id, + "content": str(tool_result) + }) + + # 4. Get final response from LLM + final_response = self.together.chat.completions.create( + model=self.model, + messages=messages + ) + return final_response.choices[0].message.content + + # If no tools needed, return the assistant message directly + return assistant_message.content +``` + +```typescript TypeScript +class Agent { + private together: Together; + private klavis: KlavisClient; + private mcpServerUrl: string; + private model: string; + + constructor(togetherClient: Together, klavisClient: KlavisClient, mcpServerUrl: string, model: string = "meta-llama/Llama-3.3-70B-Instruct-Turbo") { + this.together = togetherClient; + this.klavis = klavisClient; + this.mcpServerUrl = mcpServerUrl; + this.model = model; + console.log(`šŸ¤– Agent initialized with Together AI model: ${this.model}`); + } + + async processRequest(userMessage: string) { + // 1. Get available tools + const mcpTools = await this.klavis.mcpServer.listTools({ + serverUrl: this.mcpServerUrl, + format: Klavis.ToolFormat.Openai + }); + + // 2. Call LLM with tools + const messages = [ + { role: "system", content: "You are a helpful AI assistant with access to various tools." }, + { role: "user", content: userMessage } + ]; + + const response = await this.together.chat.completions.create({ + model: this.model, + messages: messages, + tools: mcpTools.tools + }); + + const assistantMessage = response.choices[0].message; + messages.push(assistantMessage); + + // 3. If LLM wants to use tools + if (assistantMessage.tool_calls) { + // Execute each tool call + for (const toolCall of assistantMessage.tool_calls) { + const toolName = toolCall.function.name; + const toolArgs = JSON.parse(toolCall.function.arguments); + + console.log(`šŸ› ļø Calling tool: ${toolName} with args:`, toolArgs); + // Call tool via Klavis SDK + const toolResult = await this.klavis.mcpServer.callTools({ + serverUrl: this.mcpServerUrl, + toolName: toolName, + toolArgs: toolArgs + }); + + messages.push({ + role: "tool", + tool_call_id: toolCall.id, + content: JSON.stringify(toolResult) + }); + } + + // 4. Get final response from LLM + const finalResponse = await this.together.chat.completions.create({ + model: this.model, + messages: messages + }); + return finalResponse.choices[0].message.content; + } + + // If no tools needed, return the assistant message directly + return assistantMessage.content; + } +} +``` + + + +## Use Case Examples + +### Example 1: Summarize YouTube Video + + + + Set up Together AI and Klavis API clients + + + Create a YouTube MCP server instance + + + Use the agent to analyze and summarize a YouTube video + + + + + +```python Python +# Example YouTube video URL - replace with any video you'd like to analyze +YOUTUBE_VIDEO_URL = "https://www.youtube.com/watch?v=TG6QOa2JJJQ" + +# 1. Create YouTube MCP server instance +youtube_mcp_instance = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.YOUTUBE, + user_id="1234" +) + +# 2. Create an agent with YouTube MCP server +agent = Agent( + together_client=together_client, + klavis_client=klavis_client, + mcp_server_url=youtube_mcp_instance.server_url, + model="meta-llama/Llama-3.3-70B-Instruct-Turbo" +) + +# 3. Process the request +response = agent.process_request( + f"Please analyze this YouTube video and provide a comprehensive summary with timestamps: {YOUTUBE_VIDEO_URL}" +) + +print(response) +``` + +```typescript TypeScript +// Example YouTube video URL - replace with any video you'd like to analyze +const YOUTUBE_VIDEO_URL = "https://www.youtube.com/watch?v=TG6QOa2JJJQ"; + +// 1. Create YouTube MCP server instance +const youtubeMcpInstance = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Youtube, + userId: "1234" +}); + +// 2. Create an agent with YouTube MCP server +const agent = new Agent( + togetherClient, + klavisClient, + youtubeMcpInstance.serverUrl, + "meta-llama/Llama-3.3-70B-Instruct-Turbo" +); + +// 3. Process the request +const response = await agent.processRequest( + `Please analyze this YouTube video and provide a comprehensive summary with timestamps: ${YOUTUBE_VIDEO_URL}` +); + +console.log(response); +``` + + + +### Example 2: Send Email via Gmail + + +Gmail integration requires OAuth authentication, so you'll need to authorize the application in your browser. + + + + +```python Python +import webbrowser + +# Create Gmail MCP server instance +gmail_mcp_instance = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.GMAIL, + user_id="1234" +) + +# Redirect to Gmail OAuth page for authorization +webbrowser.open(gmail_mcp_instance.oauth_url) +print(f"šŸ” Opening OAuth authorization for Gmail") +print(f"If you are not redirected automatically, please open this URL: {gmail_mcp_instance.oauth_url}") + +# Email configuration +EMAIL_RECIPIENT = "recipient@example.com" # Replace with the recipient's email +EMAIL_SUBJECT = "Greetings from Together AI + Klavis Integration" +EMAIL_BODY = "This is a test email sent using the Together AI and Klavis AI integration. The email was sent automatically by your AI agent!" + +# After OAuth authorization is complete, create the Gmail agent +gmail_agent = Agent( + together_client=together_client, + klavis_client=klavis_client, + mcp_server_url=gmail_mcp_instance.server_url, + model="Qwen/Qwen2.5-72B-Instruct-Turbo" +) + +# Send the email +response = gmail_agent.process_request( + f"Please send an email to {EMAIL_RECIPIENT} with the subject '{EMAIL_SUBJECT}' and the following body: '{EMAIL_BODY}'" +) + +print(response) +``` + +```typescript TypeScript +// Create Gmail MCP server instance +const gmailMcpInstance = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Gmail, + userId: "1234" +}); + +// Redirect to Gmail OAuth page for authorization +console.log("šŸ” Opening OAuth authorization for Gmail"); +console.log(`If you are not redirected automatically, please open this URL: ${gmailMcpInstance.oauthUrl}`); +// In a web environment, you might redirect the user +window.open(gmailMcpInstance.oauthUrl); + +// Email configuration +const EMAIL_RECIPIENT = "recipient@example.com"; // Replace with the recipient's email +const EMAIL_SUBJECT = "Greetings from Together AI + Klavis Integration"; +const EMAIL_BODY = "This is a test email sent using the Together AI and Klavis AI integration. The email was sent automatically by your AI agent!"; + +// After OAuth authorization is complete, create the Gmail agent +const gmailAgent = new Agent( + togetherClient, + klavisClient, + gmailMcpInstance.serverUrl, + "Qwen/Qwen2.5-72B-Instruct-Turbo" +); + +// Send the email +const response = await gmailAgent.processRequest( + `Please send an email to ${EMAIL_RECIPIENT} with the subject '${EMAIL_SUBJECT}' and the following body: '${EMAIL_BODY}'` +); + +console.log(response); +``` + + + +## Next Steps + + + + Try other available servers like Slack, Notion, CRM etc. + + + Test various Together AI models for different use cases. + + + Create sophisticated agents that combine multiple services + + + Scale these patterns for production applications + + + +## Useful Resources + +- [Together AI Documentation](https://docs.together.ai/) +- [Klavis AI Documentation](https://www.klavis.ai/docs/) +- [MCP Protocol Specification](https://modelcontextprotocol.io/) +- [Together AI Models](https://docs.together.ai/docs/inference-models) +- [Klavis MCP Servers](/mcp-server) + +**Happy building with Together AI and Klavis!** šŸš€ \ No newline at end of file diff --git a/docs/api-reference/api_key.mdx b/docs/api-reference/api_key.mdx new file mode 100644 index 00000000..7d61325c --- /dev/null +++ b/docs/api-reference/api_key.mdx @@ -0,0 +1,13 @@ +--- +title: "Klavis API Key" +description: "How to get your API key for authentication" +--- + +### Personal Account +1. Go to **[Dashboard](https://www.klavis.ai/home/)** +2. Navigate to **API KEY** section + +### Team Account +1. Go to **[Dashboard](https://www.klavis.ai/home/)** +2. Navigate to **Team** +3. Access **API KEY** section diff --git a/docs/api-reference/introduction.mdx b/docs/api-reference/introduction.mdx new file mode 100644 index 00000000..ad8a03d9 --- /dev/null +++ b/docs/api-reference/introduction.mdx @@ -0,0 +1,44 @@ +--- +title: "Introduction" +description: "Klavis provides API for developers to integrate MCP to your AI application." +--- + +## Base URL + +The Klavis API is built on REST principles. We enforce HTTPS in every request to improve data security, integrity, and privacy. The API does not support HTTP. + +All requests contain the following base URL: + +```bash +https://api.klavis.ai +``` + +## Authentication + +To authenticate you need to add an Authorization header with the contents of the header being Bearer key_123456789 where key_123456789 is your API Key. + +```bash +Authorization: Bearer key_123456789 +``` + +## Response codes + +Klavis uses standard HTTP codes to indicate the success or failure of your requests. + +In general, 2xx HTTP codes correspond to success, 4xx codes are for user-related failures, and 5xx codes are for infrastructure issues. + +| Status | Description | +| ------ | --------------------------------------- | +| 200 | Successful request. | +| 400 | Check that the parameters were correct. | +| 401 | The API key used was missing. | +| 403 | The API key used was invalid. | +| 404 | The resource was not found. | +| 429 | The rate limit was exceeded. | +| 5xx | Indicates an error with Klavis servers. | + +Check Error Codes for a comprehensive breakdown of all possible API errors. + +## Rate limit + +The default maximum rate limit is 2 requests per second. This number can be increased for trusted senders by request. After that, you'll hit the rate limit and receive a 429 response error code. \ No newline at end of file diff --git a/docs/api-reference/mcp-server/call-tool.mdx b/docs/api-reference/mcp-server/call-tool.mdx new file mode 100644 index 00000000..c358ae79 --- /dev/null +++ b/docs/api-reference/mcp-server/call-tool.mdx @@ -0,0 +1,3 @@ +--- +openapi: post /mcp-server/call-tool +--- \ No newline at end of file diff --git a/docs/api-reference/mcp-server/create-a-self-hosted-mcp-server-instance.mdx b/docs/api-reference/mcp-server/create-a-self-hosted-mcp-server-instance.mdx new file mode 100644 index 00000000..2b182f92 --- /dev/null +++ b/docs/api-reference/mcp-server/create-a-self-hosted-mcp-server-instance.mdx @@ -0,0 +1,3 @@ +--- +openapi: post /mcp-server/self-hosted/instance/create +--- \ No newline at end of file diff --git a/docs/api-reference/mcp-server/create-a-server-instance.mdx b/docs/api-reference/mcp-server/create-a-server-instance.mdx new file mode 100644 index 00000000..56fbe23c --- /dev/null +++ b/docs/api-reference/mcp-server/create-a-server-instance.mdx @@ -0,0 +1,3 @@ +--- +openapi: post /mcp-server/instance/create +--- \ No newline at end of file diff --git a/docs/api-reference/mcp-server/delete-a-server-instance.mdx b/docs/api-reference/mcp-server/delete-a-server-instance.mdx new file mode 100644 index 00000000..49414ab0 --- /dev/null +++ b/docs/api-reference/mcp-server/delete-a-server-instance.mdx @@ -0,0 +1,3 @@ +--- +openapi: delete /mcp-server/instance/{instanceId} +--- \ No newline at end of file diff --git a/docs/api-reference/mcp-server/delete-auth-data-for-a-server-instance.mdx b/docs/api-reference/mcp-server/delete-auth-data-for-a-server-instance.mdx new file mode 100644 index 00000000..7d39a1d5 --- /dev/null +++ b/docs/api-reference/mcp-server/delete-auth-data-for-a-server-instance.mdx @@ -0,0 +1,3 @@ +--- +openapi: delete /mcp-server/instance/{instanceId}/auth +--- \ No newline at end of file diff --git a/docs/api-reference/mcp-server/delete-instance_auth.mdx b/docs/api-reference/mcp-server/delete-instance_auth.mdx new file mode 100644 index 00000000..f2e09f92 --- /dev/null +++ b/docs/api-reference/mcp-server/delete-instance_auth.mdx @@ -0,0 +1,4 @@ +--- +openapi: delete /mcp-server/instance/{instanceId}/auth +--- + diff --git a/docs/api-reference/mcp-server/get-all-servers.mdx b/docs/api-reference/mcp-server/get-all-servers.mdx new file mode 100644 index 00000000..ca1cf4d9 --- /dev/null +++ b/docs/api-reference/mcp-server/get-all-servers.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /mcp-server/servers +--- \ No newline at end of file diff --git a/docs/api-reference/mcp-server/get-instance_auth.mdx b/docs/api-reference/mcp-server/get-instance_auth.mdx new file mode 100644 index 00000000..66a870bf --- /dev/null +++ b/docs/api-reference/mcp-server/get-instance_auth.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /mcp-server/instance/{instanceId}/auth +--- diff --git a/docs/api-reference/mcp-server/get-server-instance.mdx b/docs/api-reference/mcp-server/get-server-instance.mdx new file mode 100644 index 00000000..954ae2bf --- /dev/null +++ b/docs/api-reference/mcp-server/get-server-instance.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /mcp-server/instance/{instanceId} +--- \ No newline at end of file diff --git a/docs/api-reference/mcp-server/get-tools.mdx b/docs/api-reference/mcp-server/get-tools.mdx new file mode 100644 index 00000000..02e693e6 --- /dev/null +++ b/docs/api-reference/mcp-server/get-tools.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /mcp-server/tools/{serverName} +--- \ No newline at end of file diff --git a/docs/api-reference/mcp-server/list-tools.mdx b/docs/api-reference/mcp-server/list-tools.mdx new file mode 100644 index 00000000..195584b5 --- /dev/null +++ b/docs/api-reference/mcp-server/list-tools.mdx @@ -0,0 +1,3 @@ +--- +openapi: post /mcp-server/list-tools +--- \ No newline at end of file diff --git a/docs/api-reference/mcp-server/set-instance_auth.mdx b/docs/api-reference/mcp-server/set-instance_auth.mdx new file mode 100644 index 00000000..ed0e9dea --- /dev/null +++ b/docs/api-reference/mcp-server/set-instance_auth.mdx @@ -0,0 +1,3 @@ +--- +openapi: post /mcp-server/instance/set-auth +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/airtable-oauth/authorize-airtable.mdx b/docs/api-reference/oauth/airtable-oauth/authorize-airtable.mdx new file mode 100644 index 00000000..1b7a8dc5 --- /dev/null +++ b/docs/api-reference/oauth/airtable-oauth/authorize-airtable.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/airtable/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/asana-oauth/authorize-asana.mdx b/docs/api-reference/oauth/asana-oauth/authorize-asana.mdx new file mode 100644 index 00000000..6030e9f8 --- /dev/null +++ b/docs/api-reference/oauth/asana-oauth/authorize-asana.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/asana/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/attio-oauth/authorize-attio.mdx b/docs/api-reference/oauth/attio-oauth/authorize-attio.mdx new file mode 100644 index 00000000..09aa1d0d --- /dev/null +++ b/docs/api-reference/oauth/attio-oauth/authorize-attio.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/attio/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/box-oauth/authorize-box.mdx b/docs/api-reference/oauth/box-oauth/authorize-box.mdx new file mode 100644 index 00000000..f63e4f40 --- /dev/null +++ b/docs/api-reference/oauth/box-oauth/authorize-box.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/box/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/calcom-oauth/authorize-calcom.mdx b/docs/api-reference/oauth/calcom-oauth/authorize-calcom.mdx new file mode 100644 index 00000000..1e258064 --- /dev/null +++ b/docs/api-reference/oauth/calcom-oauth/authorize-calcom.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/calcom/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/canva-oauth/authorize-canva.mdx b/docs/api-reference/oauth/canva-oauth/authorize-canva.mdx new file mode 100644 index 00000000..46c9f966 --- /dev/null +++ b/docs/api-reference/oauth/canva-oauth/authorize-canva.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/canva/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/clickup-oauth/authorize-clickup.mdx b/docs/api-reference/oauth/clickup-oauth/authorize-clickup.mdx new file mode 100644 index 00000000..00db743d --- /dev/null +++ b/docs/api-reference/oauth/clickup-oauth/authorize-clickup.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/clickup/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/close-oauth/authorize-close.mdx b/docs/api-reference/oauth/close-oauth/authorize-close.mdx new file mode 100644 index 00000000..7248d758 --- /dev/null +++ b/docs/api-reference/oauth/close-oauth/authorize-close.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/close/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/confluence-oauth/authorize-confluence.mdx b/docs/api-reference/oauth/confluence-oauth/authorize-confluence.mdx new file mode 100644 index 00000000..ddf99d4f --- /dev/null +++ b/docs/api-reference/oauth/confluence-oauth/authorize-confluence.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/confluence/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/dialpad-oauth/authorize-dialpad.mdx b/docs/api-reference/oauth/dialpad-oauth/authorize-dialpad.mdx new file mode 100644 index 00000000..cd9e1c39 --- /dev/null +++ b/docs/api-reference/oauth/dialpad-oauth/authorize-dialpad.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/dialpad/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/docusign-oauth/authorize-docusign.mdx b/docs/api-reference/oauth/docusign-oauth/authorize-docusign.mdx new file mode 100644 index 00000000..0091eb59 --- /dev/null +++ b/docs/api-reference/oauth/docusign-oauth/authorize-docusign.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/docusign/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/dropbox-oauth/authorize-dropbox.mdx b/docs/api-reference/oauth/dropbox-oauth/authorize-dropbox.mdx new file mode 100644 index 00000000..27795866 --- /dev/null +++ b/docs/api-reference/oauth/dropbox-oauth/authorize-dropbox.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/dropbox/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/figma-oauth/authorize-figma.mdx b/docs/api-reference/oauth/figma-oauth/authorize-figma.mdx new file mode 100644 index 00000000..b9a68947 --- /dev/null +++ b/docs/api-reference/oauth/figma-oauth/authorize-figma.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/figma/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/gcalendar-oauth/authorize-gcalendar.mdx b/docs/api-reference/oauth/gcalendar-oauth/authorize-gcalendar.mdx new file mode 100644 index 00000000..b914bcab --- /dev/null +++ b/docs/api-reference/oauth/gcalendar-oauth/authorize-gcalendar.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/gcalendar/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/gdocs-oauth/authorize-gdocs.mdx b/docs/api-reference/oauth/gdocs-oauth/authorize-gdocs.mdx new file mode 100644 index 00000000..3ab6fc21 --- /dev/null +++ b/docs/api-reference/oauth/gdocs-oauth/authorize-gdocs.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/gdocs/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/gdrive-oauth/authorize-gdrive.mdx b/docs/api-reference/oauth/gdrive-oauth/authorize-gdrive.mdx new file mode 100644 index 00000000..6260eb5e --- /dev/null +++ b/docs/api-reference/oauth/gdrive-oauth/authorize-gdrive.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/gdrive/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/github-oauth/authorize-github.mdx b/docs/api-reference/oauth/github-oauth/authorize-github.mdx new file mode 100644 index 00000000..f07f3278 --- /dev/null +++ b/docs/api-reference/oauth/github-oauth/authorize-github.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/github/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/gitlab-oauth/authorize-gitlab.mdx b/docs/api-reference/oauth/gitlab-oauth/authorize-gitlab.mdx new file mode 100644 index 00000000..b82e5713 --- /dev/null +++ b/docs/api-reference/oauth/gitlab-oauth/authorize-gitlab.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/gitlab/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/gmail-oauth/authorize-gmail.mdx b/docs/api-reference/oauth/gmail-oauth/authorize-gmail.mdx new file mode 100644 index 00000000..480af99f --- /dev/null +++ b/docs/api-reference/oauth/gmail-oauth/authorize-gmail.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/gmail/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/gsheets-oauth/authorize-gsheets.mdx b/docs/api-reference/oauth/gsheets-oauth/authorize-gsheets.mdx new file mode 100644 index 00000000..ea0087b6 --- /dev/null +++ b/docs/api-reference/oauth/gsheets-oauth/authorize-gsheets.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/gsheets/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/hubspot-oauth/authorize-hubspot.mdx b/docs/api-reference/oauth/hubspot-oauth/authorize-hubspot.mdx new file mode 100644 index 00000000..d9ce79e6 --- /dev/null +++ b/docs/api-reference/oauth/hubspot-oauth/authorize-hubspot.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/hubspot/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/jira-oauth/authorize-jira.mdx b/docs/api-reference/oauth/jira-oauth/authorize-jira.mdx new file mode 100644 index 00000000..ec8da1b4 --- /dev/null +++ b/docs/api-reference/oauth/jira-oauth/authorize-jira.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/jira/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/klaviyo-oauth/authorize-klaviyo.mdx b/docs/api-reference/oauth/klaviyo-oauth/authorize-klaviyo.mdx new file mode 100644 index 00000000..1be1922f --- /dev/null +++ b/docs/api-reference/oauth/klaviyo-oauth/authorize-klaviyo.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/klaviyo/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/linear-oauth/authorize-linear.mdx b/docs/api-reference/oauth/linear-oauth/authorize-linear.mdx new file mode 100644 index 00000000..8766b77d --- /dev/null +++ b/docs/api-reference/oauth/linear-oauth/authorize-linear.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/linear/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/linkedin-oauth/authorize-linkedin.mdx b/docs/api-reference/oauth/linkedin-oauth/authorize-linkedin.mdx new file mode 100644 index 00000000..bf8a1c64 --- /dev/null +++ b/docs/api-reference/oauth/linkedin-oauth/authorize-linkedin.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/linkedin/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/notion-oauth/authorize-notion.mdx b/docs/api-reference/oauth/notion-oauth/authorize-notion.mdx new file mode 100644 index 00000000..fb2dd3df --- /dev/null +++ b/docs/api-reference/oauth/notion-oauth/authorize-notion.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/notion/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/onedrive-oauth/authorize-onedrive.mdx b/docs/api-reference/oauth/onedrive-oauth/authorize-onedrive.mdx new file mode 100644 index 00000000..dfbdb4b5 --- /dev/null +++ b/docs/api-reference/oauth/onedrive-oauth/authorize-onedrive.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/onedrive/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/onedrive-oauth/authorize.mdx b/docs/api-reference/oauth/onedrive-oauth/authorize.mdx new file mode 100644 index 00000000..dfbdb4b5 --- /dev/null +++ b/docs/api-reference/oauth/onedrive-oauth/authorize.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/onedrive/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/outlook-oauth/authorize-outlook.mdx b/docs/api-reference/oauth/outlook-oauth/authorize-outlook.mdx new file mode 100644 index 00000000..31a6f214 --- /dev/null +++ b/docs/api-reference/oauth/outlook-oauth/authorize-outlook.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/outlook/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/outlook-oauth/authorize.mdx b/docs/api-reference/oauth/outlook-oauth/authorize.mdx new file mode 100644 index 00000000..31a6f214 --- /dev/null +++ b/docs/api-reference/oauth/outlook-oauth/authorize.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/outlook/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/pagerduty-oauth/authorize-pagerduty.mdx b/docs/api-reference/oauth/pagerduty-oauth/authorize-pagerduty.mdx new file mode 100644 index 00000000..12214832 --- /dev/null +++ b/docs/api-reference/oauth/pagerduty-oauth/authorize-pagerduty.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/pagerduty/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/pipedrive-oauth/authorize-pipedrive.mdx b/docs/api-reference/oauth/pipedrive-oauth/authorize-pipedrive.mdx new file mode 100644 index 00000000..a917ed5e --- /dev/null +++ b/docs/api-reference/oauth/pipedrive-oauth/authorize-pipedrive.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/pipedrive/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/quickbooks-oauth/authorize-quickbooks.mdx b/docs/api-reference/oauth/quickbooks-oauth/authorize-quickbooks.mdx new file mode 100644 index 00000000..9dba4611 --- /dev/null +++ b/docs/api-reference/oauth/quickbooks-oauth/authorize-quickbooks.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/quickbooks/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/salesforce-oauth/authorize-salesforce.mdx b/docs/api-reference/oauth/salesforce-oauth/authorize-salesforce.mdx new file mode 100644 index 00000000..4376e170 --- /dev/null +++ b/docs/api-reference/oauth/salesforce-oauth/authorize-salesforce.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/salesforce/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/shopify-oauth/authorize-shopify.mdx b/docs/api-reference/oauth/shopify-oauth/authorize-shopify.mdx new file mode 100644 index 00000000..736eb195 --- /dev/null +++ b/docs/api-reference/oauth/shopify-oauth/authorize-shopify.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/shopify/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/slack-oauth/authorize-slack.mdx b/docs/api-reference/oauth/slack-oauth/authorize-slack.mdx new file mode 100644 index 00000000..87a22d0c --- /dev/null +++ b/docs/api-reference/oauth/slack-oauth/authorize-slack.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/slack/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/stripe-connect-oauth/authorize-stripe-connect.mdx b/docs/api-reference/oauth/stripe-connect-oauth/authorize-stripe-connect.mdx new file mode 100644 index 00000000..8ca059c4 --- /dev/null +++ b/docs/api-reference/oauth/stripe-connect-oauth/authorize-stripe-connect.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/stripe/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/supabase-oauth/authorize-supabase.mdx b/docs/api-reference/oauth/supabase-oauth/authorize-supabase.mdx new file mode 100644 index 00000000..e0d82e34 --- /dev/null +++ b/docs/api-reference/oauth/supabase-oauth/authorize-supabase.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/supabase/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/vercel-oauth/authorize-vercel.mdx b/docs/api-reference/oauth/vercel-oauth/authorize-vercel.mdx new file mode 100644 index 00000000..c162c591 --- /dev/null +++ b/docs/api-reference/oauth/vercel-oauth/authorize-vercel.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/vercel/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/wordpress-oauth/authorize-wordpress.mdx b/docs/api-reference/oauth/wordpress-oauth/authorize-wordpress.mdx new file mode 100644 index 00000000..fcead937 --- /dev/null +++ b/docs/api-reference/oauth/wordpress-oauth/authorize-wordpress.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/wordpress/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/xero-oauth/authorize-xero.mdx b/docs/api-reference/oauth/xero-oauth/authorize-xero.mdx new file mode 100644 index 00000000..59b0ad3b --- /dev/null +++ b/docs/api-reference/oauth/xero-oauth/authorize-xero.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/xero/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/oauth/zendesk-oauth/authorize-zendesk.mdx b/docs/api-reference/oauth/zendesk-oauth/authorize-zendesk.mdx new file mode 100644 index 00000000..3f89313e --- /dev/null +++ b/docs/api-reference/oauth/zendesk-oauth/authorize-zendesk.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /oauth/zendesk/authorize +--- \ No newline at end of file diff --git a/docs/api-reference/openapi.json b/docs/api-reference/openapi.json new file mode 100644 index 00000000..242b2cbc --- /dev/null +++ b/docs/api-reference/openapi.json @@ -0,0 +1,7474 @@ +{ + "openapi": "3.1.0", + "info": { + "title": "Klavis AI (https://www.klavis.ai)", + "description": "Klavis AI - Open Source MCP Integrations for AI Applications", + "version": "0.1.0" + }, + "servers": [ + { + "url": "https://api.klavis.ai/", + "description": "US Production server" + }, + { + "url": "https://api.eu.klavis.ai/", + "description": "EU Production server" + } + ], + "paths": { + "/mcp-server/call-tool": { + "post": { + "tags": [ + "mcp-server" + ], + "summary": "Call Tool", + "description": "Calls a tool on a specific remote MCP server, used for function calling. Eliminates the need for manual MCP code implementation.\nUnder the hood, Klavis will instantiates an MCP client and establishes a connection with the remote MCP server to call the tool.", + "operationId": "callServerTool", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CallToolRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CallToolResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBearer": [] + } + ] + } + }, + "/mcp-server/list-tools": { + "post": { + "tags": [ + "mcp-server" + ], + "summary": "List Tools", + "description": "Lists all tools available for a specific remote MCP server in various AI model formats.\n\nThis eliminates the need for manual MCP code implementation and format conversion.\nUnder the hood, Klavis instantiates an MCP client and establishes a connection \nwith the remote MCP server to retrieve available tools.", + "operationId": "listServerTools", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListToolsRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListToolsResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBearer": [] + } + ] + } + }, + "/mcp-server/strata/create": { + "post": { + "tags": [ + "mcp-server" + ], + "summary": "Create", + "description": "Create a Strata MCP server.\n\nParameters:\n- servers: Can be 'ALL' to add all available Klavis integration, a list of specific server names, or null to add no servers\n- externalServers: Optional list of external MCP servers to validate and add", + "operationId": "create_strata_server_mcp_server_strata_create_post", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StrataCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StrataCreateResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBearer": [] + } + ] + } + }, + "/mcp-server/strata/add": { + "post": { + "tags": [ + "mcp-server" + ], + "summary": "Add", + "description": "Add servers to an existing Strata MCP server.\n\nNote: After adding servers, you need to reconnect the MCP server so that list_tool can be updated with the new servers.\n\nParameters:\n- servers: Can be 'ALL' to add all available servers, a list of specific server names, or null to add no servers\n- externalServers: Optional list of external MCP servers to validate and add", + "operationId": "add_servers_to_strata_mcp_server_strata_add_post", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StrataAddServersRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StrataAddServersResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBearer": [] + } + ] + } + }, + "/mcp-server/strata/{strataId}/servers": { + "delete": { + "tags": [ + "mcp-server" + ], + "summary": "Delete", + "description": "Delete servers from an existing Strata MCP server.\n\nNote: After deleting servers, you need to reconnect the MCP server so that list_tool can be updated to reflect the removed servers.\n\nParameters:\n- strataId: The strata server ID (path parameter)\n- servers: Can be 'ALL' to delete all available Klavis integration, a list of specific server names, or null to delete no servers\n- externalServers: Query parameter - comma-separated list of external server names to delete\n\nReturns separate lists for deleted Klavis servers and deleted external servers.", + "operationId": "delete_servers_from_strata_mcp_server_strata__strataId__servers_delete", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "strataId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "Strataid" + } + }, + { + "name": "servers", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/components/schemas/McpServerName" + }, + { + "const": "ALL", + "type": "string" + } + ] + } + }, + { + "type": "null" + } + ], + "description": "List of Klavis integration to delete (e.g., 'jira', 'linear'), 'ALL' to delete all Klavis integration, or null to delete no servers.", + "title": "Servers" + }, + "description": "List of Klavis integration to delete (e.g., 'jira', 'linear'), 'ALL' to delete all Klavis integration, or null to delete no servers." + }, + { + "name": "externalServers", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Comma-separated list of external server names to delete", + "title": "Externalservers" + }, + "description": "Comma-separated list of external server names to delete" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StrataDeleteServersResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/mcp-server/strata/{strataId}": { + "get": { + "tags": [ + "mcp-server" + ], + "summary": "Get", + "description": "Get information about an existing Strata MCP server instance.\n\nReturns the strata URL, connected klavis servers, connected external servers (with URLs), \nand authentication URLs for klavis servers.", + "operationId": "get_strata_server_mcp_server_strata__strataId__get", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "strataId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "Strataid" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StrataGetResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/mcp-server/strata/{strataId}/raw-actions": { + "get": { + "tags": [ + "mcp-server" + ], + "summary": "List Raw Actions", + "description": "Fetch raw actions (all underlying actions) for a specific integration within a Strata MCP instance.", + "operationId": "list_strata_raw_actions_mcp_server_strata__strataId__raw_actions_get", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "strataId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "description": "The strata server ID", + "title": "Strataid" + }, + "description": "The strata server ID" + }, + { + "name": "server", + "in": "query", + "required": true, + "schema": { + "$ref": "#/components/schemas/McpServerName", + "minLength": 1, + "description": "The name of the server to fetch raw actions for" + }, + "description": "The name of the server to fetch raw actions for" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StrataRawActionsResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/mcp-server/strata/{strataId}/auth/{serverName}": { + "get": { + "tags": [ + "mcp-server" + ], + "summary": "Get Strata Auth", + "description": "Retrieves authentication data for a specific integration within a Strata MCP server.\n\nReturns the authentication data if available, along with authentication status.", + "operationId": "getStrataAuth", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "strataId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "description": "The strata server ID", + "title": "Strataid" + }, + "description": "The strata server ID" + }, + { + "name": "serverName", + "in": "path", + "required": true, + "schema": { + "$ref": "#/components/schemas/McpServerName", + "description": "The name of the Klavis MCP server to get authentication for (e.g., 'GitHub', 'Jira')" + }, + "description": "The name of the Klavis MCP server to get authentication for (e.g., 'GitHub', 'Jira')" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StrataGetAuthResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "mcp-server" + ], + "summary": "Delete Strata Auth", + "description": "Deletes authentication data for a specific integration within a Strata MCP server.\n\nThis will clear the stored authentication credentials, effectively unauthenticating the server.", + "operationId": "deleteStrataAuth", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "strataId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "description": "The strata server ID", + "title": "Strataid" + }, + "description": "The strata server ID" + }, + { + "name": "serverName", + "in": "path", + "required": true, + "schema": { + "$ref": "#/components/schemas/McpServerName", + "description": "The name of the Klavis MCP server to delete authentication for (e.g., 'github', 'jira')" + }, + "description": "The name of the Klavis MCP server to delete authentication for (e.g., 'github', 'jira')" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StatusResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/mcp-server/strata/set-auth": { + "post": { + "tags": [ + "mcp-server" + ], + "summary": "Set Strata Auth", + "description": "Sets authentication data for a specific integration within a Strata MCP server.\n\nAccepts either API key authentication or general authentication data.", + "operationId": "set_strata_auth_mcp_server_strata_set_auth_post", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StrataSetAuthRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StatusResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBearer": [] + } + ] + } + }, + "/mcp-server/instance/create": { + "post": { + "tags": [ + "mcp-server" + ], + "summary": "Create", + "description": "Creates a URL for a specified MCP server,\nvalidating the request with an API key and user details.\nReturns the existing server URL if it already exists for the user.\nIf OAuth is configured for the server, also returns the base OAuth authorization URL.\nNote that some servers have hundreds of tools and therefore only expose the Strata tools.", + "operationId": "createServerInstance", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateServerRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateServerResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBearer": [] + } + ] + } + }, + "/mcp-server/self-hosted/instance/create": { + "post": { + "tags": [ + "mcp-server" + ], + "summary": "Create", + "description": "Creates an instance id for a self-hosted MCP server,\nvalidating the request with an API key and user details.\nThe main purpose of this endpoint is to create an instance id for a self-hosted MCP server.\nThe instance id is used to identify and store the auth metadata in the database.\nReturns the existing instance id if it already exists for the user.", + "operationId": "createSelfHostedServerInstance", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSelfHostedServerRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSelfHostedServerResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBearer": [] + } + ] + } + }, + "/mcp-server/instance/{instanceId}": { + "get": { + "tags": [ + "mcp-server" + ], + "summary": "Get", + "description": "Checks the details of a specific server connection instance using its unique ID and API key,\nreturning server details like authentication status and associated server/platform info.", + "operationId": "getServerInstance", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "instanceId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "description": "The ID of the connection integration instance whose status is being checked. This is returned by the Create API.", + "title": "Instanceid" + }, + "description": "The ID of the connection integration instance whose status is being checked. This is returned by the Create API." + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetInstanceResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "mcp-server" + ], + "summary": "Delete", + "description": "Completely removes a server connection instance using its unique ID,\ndeleting all associated data from the system.", + "operationId": "deleteServerInstance", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "instanceId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "description": "The ID of the connection instance to delete.", + "title": "Instanceid" + }, + "description": "The ID of the connection instance to delete." + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StatusResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/mcp-server/instance/{instanceId}/auth": { + "delete": { + "tags": [ + "mcp-server" + ], + "summary": "Delete Instance Auth", + "description": "Deletes authentication data for a specific server connection instance.", + "operationId": "deleteInstanceAuth", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "instanceId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "description": "The ID of the connection instance to delete auth for.", + "title": "Instanceid" + }, + "description": "The ID of the connection instance to delete auth for." + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StatusResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "get": { + "tags": [ + "mcp-server" + ], + "summary": "Get Instance Auth", + "description": "Retrieves the auth data for a specific integration instance that the API key owner controls.\nIncludes access token, refresh token, and other authentication data.\n\nThis endpoint includes proper ownership verification to ensure users can only access\nauthentication data for integration instances they own. It also handles token refresh if needed.", + "operationId": "getInstanceAuthData", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "instanceId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "description": "The ID of the connection integration instance to get auth data for.", + "title": "Instanceid" + }, + "description": "The ID of the connection integration instance to get auth data for." + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetAuthDataResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/mcp-server/tools/{serverName}": { + "get": { + "tags": [ + "mcp-server" + ], + "summary": "Get Tools", + "description": "Get tools information for any MCP server.", + "operationId": "getServerTools", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "serverName", + "in": "path", + "required": true, + "schema": { + "$ref": "#/components/schemas/McpServerName", + "description": "The name of the target MCP server. Case-insensitive (e.g., 'google calendar', 'GOOGLE_CALENDAR', 'Google Calendar' are all valid)." + }, + "description": "The name of the target MCP server. Case-insensitive (e.g., 'google calendar', 'GOOGLE_CALENDAR', 'Google Calendar' are all valid)." + }, + { + "name": "format", + "in": "query", + "required": false, + "schema": { + "$ref": "#/components/schemas/ToolFormat", + "description": "The format to return tools in. Default is MCP Native format for maximum compatibility.", + "default": "mcp_native" + }, + "description": "The format to return tools in. Default is MCP Native format for maximum compatibility." + }, + { + "name": "legacy", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "description": "Whether to use the legacy server. Default is False.", + "default": false, + "title": "Legacy" + }, + "description": "Whether to use the legacy server. Default is False." + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListToolsResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/mcp-server/servers": { + "get": { + "tags": [ + "mcp-server" + ], + "summary": "Get All Servers", + "description": "Get all MCP servers with their basic information including id, name, description, and tools.", + "operationId": "getAllMcpServers", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetMcpServersResponse" + } + } + } + } + }, + "security": [ + { + "HTTPBearer": [] + } + ] + } + }, + "/mcp-server/instance/set-auth": { + "post": { + "tags": [ + "mcp-server" + ], + "summary": "Set Instance Auth", + "description": "Sets authentication data for a specific integration instance.\nAccepts either API key authentication or general authentication data.\nThis updates the auth_metadata for the specified integration instance.", + "operationId": "setInstanceAuth", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetAuthRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StatusResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBearer": [] + } + ] + } + }, + "/white-labeling/create": { + "post": { + "tags": [ + "white-labeling" + ], + "summary": "Create", + "description": "Saves OAuth white labeling information, or updates existing information if the `client_id` matches.", + "operationId": "createWhiteLabeling", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateWhiteLabelingRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WhiteLabelingResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBearer": [] + } + ] + } + }, + "/white-labeling/get/{client_id}": { + "get": { + "tags": [ + "white-labeling" + ], + "summary": "Get", + "description": "Retrieves white labeling information for a specific OAuth client ID.", + "operationId": "getWhiteLabelingByClientId", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "client_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "Client Id" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WhiteLabelingResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/user/{userId}/integrations": { + "get": { + "tags": [ + "user" + ], + "summary": "Get User Integrations", + "description": "Get all available integrations (MCP server names) by user ID.\nReturns a list of integration names as McpServerName types.", + "operationId": "getUserIntegrations", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "userId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "minLength": 1, + "description": "The external user ID", + "title": "Userid" + }, + "description": "The external user ID" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetUserIntegrationsResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/user/{userId}": { + "get": { + "tags": [ + "user" + ], + "summary": "Get User", + "description": "Get user information by user_id.", + "operationId": "getUserByUserId", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "userId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "minLength": 1, + "description": "The identifier for the user to fetch.", + "title": "Userid" + }, + "description": "The identifier for the user to fetch." + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetUserResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "user" + ], + "summary": "Delete User", + "description": "Delete a user and all associated data by user_id.\nUsers cannot delete their own accounts.\nThis operation will permanently remove all user data.", + "operationId": "deleteUserByUserId", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "userId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "minLength": 1, + "description": "The identifier for the user to delete.", + "title": "Userid" + }, + "description": "The identifier for the user to delete." + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DeleteUserResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/user/": { + "get": { + "tags": [ + "user" + ], + "summary": "Get All Users", + "description": "Retrieve all users that have been created under your account, with support for pagination.", + "operationId": "getAllUsers", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 1000, + "minimum": 1, + "description": "Number of results per page (max 1000)", + "default": 50, + "title": "Page Size" + }, + "description": "Number of results per page (max 1000)" + }, + { + "name": "page_number", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Page number to retrieve (starting from 1)", + "default": 1, + "title": "Page Number" + }, + "description": "Page number to retrieve (starting from 1)" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetAllUsersResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/user/set-auth": { + "post": { + "tags": [ + "user" + ], + "summary": "Set User Auth", + "description": "Sets authentication data for a specific integration for a user.\n\nAccepts either API key authentication or general authentication data.\nThis updates the auth_metadata for the specified user's integration instance.", + "operationId": "setUserAuth", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetUserAuthRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StatusResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBearer": [] + } + ] + } + }, + "/user/{userId}/auth/{serverName}": { + "get": { + "tags": [ + "user" + ], + "summary": "Get User Auth", + "description": "Retrieves authentication data for a specific integration for a user.\n\nReturns the authentication data if available, along with authentication status.\nIncludes token refresh handling if needed.", + "operationId": "getUserAuth", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "userId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "minLength": 1, + "description": "The identifier for the user", + "title": "Userid" + }, + "description": "The identifier for the user" + }, + { + "name": "serverName", + "in": "path", + "required": true, + "schema": { + "$ref": "#/components/schemas/McpServerName", + "description": "The name of the MCP server (e.g., 'GitHub', 'Jira')" + }, + "description": "The name of the MCP server (e.g., 'GitHub', 'Jira')" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetUserAuthResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "user" + ], + "summary": "Delete User Auth", + "description": "Deletes authentication data for a specific integration for a user.\n\nThis will clear the stored authentication credentials, effectively unauthenticating the integration.", + "operationId": "deleteUserAuth", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "userId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "minLength": 1, + "description": "The unique identifier for the user", + "title": "Userid" + }, + "description": "The unique identifier for the user" + }, + { + "name": "serverName", + "in": "path", + "required": true, + "schema": { + "$ref": "#/components/schemas/McpServerName", + "description": "The name of the MCP server to delete authentication for (e.g., 'github', 'jira')" + }, + "description": "The name of the MCP server to delete authentication for (e.g., 'github', 'jira')" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StatusResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/slack/authorize": { + "get": { + "tags": [ + "slack-oauth" + ], + "summary": "Authorize Slack", + "description": "Start Slack OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- user_scope: Optional user-specific scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeSlack", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "user_scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional user-specific OAuth scopes to request (comma-separated string)", + "title": "User Scope" + }, + "description": "Optional user-specific OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/github/authorize": { + "get": { + "tags": [ + "github-oauth" + ], + "summary": "Authorize Github", + "description": "Start GitHub OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeGithub", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/gitlab/authorize": { + "get": { + "tags": [ + "gitlab-oauth" + ], + "summary": "Authorize Gitlab", + "description": "Start GitLab OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeGitlab", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/supabase/authorize": { + "get": { + "tags": [ + "supabase-oauth" + ], + "summary": "Authorize Supabase", + "description": "Start Supabase OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeSupabase", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/notion/authorize": { + "get": { + "tags": [ + "notion-oauth" + ], + "summary": "Authorize Notion", + "description": "Start Notion OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeNotion", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/jira/authorize": { + "get": { + "tags": [ + "jira-oauth" + ], + "summary": "Authorize Jira", + "description": "Start Jira OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeJira", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/confluence/authorize": { + "get": { + "tags": [ + "confluence-oauth" + ], + "summary": "Authorize Confluence", + "description": "Start Confluence OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeConfluence", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/wordpress/authorize": { + "get": { + "tags": [ + "wordpress-oauth" + ], + "summary": "Authorize Wordpress", + "description": "Start WordPress OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeWordpress", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/gmail/authorize": { + "get": { + "tags": [ + "gmail-oauth" + ], + "summary": "Authorize Gmail", + "description": "Start Gmail OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeGmail", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/gdrive/authorize": { + "get": { + "tags": [ + "gdrive-oauth" + ], + "summary": "Authorize Gdrive", + "description": "Start Google Drive OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeGDrive", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/gcalendar/authorize": { + "get": { + "tags": [ + "gcalendar-oauth" + ], + "summary": "Authorize Gcalendar", + "description": "Start Google Calendar OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeGCalendar", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/gsheets/authorize": { + "get": { + "tags": [ + "gsheets-oauth" + ], + "summary": "Authorize Gsheets", + "description": "Start Google Sheets OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeGSheets", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/gdocs/authorize": { + "get": { + "tags": [ + "gdocs-oauth" + ], + "summary": "Authorize Gdocs", + "description": "Start Google Docs OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeGDocs", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/attio/authorize": { + "get": { + "tags": [ + "attio-oauth" + ], + "summary": "Authorize Attio", + "description": "Start Attio OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeAttio", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/salesforce/authorize": { + "get": { + "tags": [ + "salesforce-oauth" + ], + "summary": "Authorize Salesforce", + "description": "Start Salesforce OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated)\n- redirect_url: Optional URL to redirect to after authorization completes\n- instance_url: Optional Salesforce instance URL for sandbox or custom domains", + "operationId": "authorizeSalesforce", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + }, + { + "name": "instance_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Salesforce instance URL (e.g., https://mycompany.salesforce.com). If not provided, will use default login.salesforce.com", + "title": "Instance Url" + }, + "description": "Salesforce instance URL (e.g., https://mycompany.salesforce.com). If not provided, will use default login.salesforce.com" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/asana/authorize": { + "get": { + "tags": [ + "asana-oauth" + ], + "summary": "Authorize Asana", + "description": "Start Asana OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeAsana", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/linear/authorize": { + "get": { + "tags": [ + "linear-oauth" + ], + "summary": "Authorize Linear", + "description": "Start Linear OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeLinear", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/close/authorize": { + "get": { + "tags": [ + "close-oauth" + ], + "summary": "Authorize Close", + "description": "Start Close OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeClose", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/clickup/authorize": { + "get": { + "tags": [ + "clickup-oauth" + ], + "summary": "Authorize Clickup", + "description": "Start ClickUp OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeClickUp", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/airtable/authorize": { + "get": { + "tags": [ + "airtable-oauth" + ], + "summary": "Authorize Airtable", + "description": "Start Airtable OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeAirtable", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/hubspot/authorize": { + "get": { + "tags": [ + "hubspot-oauth" + ], + "summary": "Authorize Hubspot", + "description": "Start HubSpot OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeHubspot", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/linkedin/authorize": { + "get": { + "tags": [ + "linkedin-oauth" + ], + "summary": "Authorize Linkedin", + "description": "Start LinkedIn OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeLinkedIn", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/canva/authorize": { + "get": { + "tags": [ + "canva-oauth" + ], + "summary": "Authorize Canva", + "description": "Start Canva OAuth flow with PKCE\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated, e.g., \"design:meta:read profile:read\")\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeCanva", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/xero/authorize": { + "get": { + "tags": [ + "xero-oauth" + ], + "summary": "Authorize Xero", + "description": "Start Xero OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeXero", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/dropbox/authorize": { + "get": { + "tags": [ + "dropbox-oauth" + ], + "summary": "Authorize Dropbox", + "description": "Start Dropbox OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeDropbox", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/box/authorize": { + "get": { + "tags": [ + "box-oauth" + ], + "summary": "Authorize Box", + "description": "Start Box OAuth 2.0 flow", + "operationId": "authorizeBox", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/quickbooks/authorize": { + "get": { + "tags": [ + "quickbooks-oauth" + ], + "summary": "Authorize Quickbooks", + "description": "Start QuickBooks OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- environment: QuickBooks environment to authorize ('sandbox' default)\n- scope: Optional scopes to request (space-separated). Default is 'com.intuit.quickbooks.accounting'\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeQuickBooks", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "environment", + "in": "query", + "required": false, + "schema": { + "enum": [ + "sandbox", + "production" + ], + "type": "string", + "description": "QuickBooks environment to authorize ('sandbox' or 'production')", + "default": "sandbox", + "title": "Environment" + }, + "description": "QuickBooks environment to authorize ('sandbox' or 'production')" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/zendesk/authorize": { + "get": { + "tags": [ + "zendesk-oauth" + ], + "summary": "Authorize Zendesk", + "description": "Start Zendesk OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated)\n- redirect_url: Optional URL to redirect to after authorization completes\n- subdomain: Zendesk subdomain for the account being connected", + "operationId": "authorizeZendesk", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "subdomain", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Zendesk subdomain for the account being connected (e.g., 'mycompany' for mycompany.zendesk.com)", + "title": "Subdomain" + }, + "description": "Zendesk subdomain for the account being connected (e.g., 'mycompany' for mycompany.zendesk.com)" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/stripe/authorize": { + "get": { + "tags": [ + "stripe-connect-oauth" + ], + "summary": "Authorize Stripe Connect", + "description": "Start Stripe Connect OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeStripeConnect", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/calcom/authorize": { + "get": { + "tags": [ + "calcom-oauth" + ], + "summary": "Authorize Calcom", + "description": "Start Cal.com OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeCalcom", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/vercel/authorize": { + "get": { + "tags": [ + "vercel-oauth" + ], + "summary": "Authorize Vercel", + "description": "Start Vercel OAuth flow using integration pattern\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- client_slug: Vercel integration slug (required for integration-based OAuth)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeVercel", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "client_slug", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Vercel integration slug (required for integration-based OAuth)", + "title": "Client Slug" + }, + "description": "Vercel integration slug (required for integration-based OAuth)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/pipedrive/authorize": { + "get": { + "tags": [ + "pipedrive-oauth" + ], + "summary": "Authorize Pipedrive", + "description": "Start Pipedrive OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizePipedrive", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/figma/authorize": { + "get": { + "tags": [ + "figma-oauth" + ], + "summary": "Authorize Figma", + "description": "Start Figma OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeFigma", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/klaviyo/authorize": { + "get": { + "tags": [ + "klaviyo-oauth" + ], + "summary": "Authorize Klaviyo", + "operationId": "authorizeKlaviyo", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/pagerduty/authorize": { + "get": { + "tags": [ + "pagerduty-oauth" + ], + "summary": "Authorize Pagerduty", + "description": "Start PagerDuty OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizePagerDuty", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/docusign/authorize": { + "get": { + "tags": [ + "docusign-oauth" + ], + "summary": "Authorize Docusign", + "description": "Start DocuSign OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeDocuSign", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/dialpad/authorize": { + "get": { + "tags": [ + "dialpad-oauth" + ], + "summary": "Authorize Dialpad", + "description": "Start Dialpad OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated)\n- redirect_url: Optional URL to redirect to after authorization completes\n- code_challenge: PKCE code challenge for enhanced security\n- code_challenge_method: PKCE code challenge method", + "operationId": "authorizeDialpad", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + }, + { + "name": "code_challenge", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "PKCE code challenge for enhanced security", + "title": "Code Challenge" + }, + "description": "PKCE code challenge for enhanced security" + }, + { + "name": "code_challenge_method", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "PKCE code challenge method (default: S256)", + "default": "S256", + "title": "Code Challenge Method" + }, + "description": "PKCE code challenge method (default: S256)" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/shopify/authorize": { + "get": { + "tags": [ + "shopify-oauth" + ], + "summary": "Authorize Shopify", + "description": "Start Shopify OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- shop: Shopify shop domain (e.g., mystore.myshopify.com)\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeShopify", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "shop", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Shopify shop domain (e.g., mystore.myshopify.com)", + "title": "Shop" + }, + "description": "Shopify shop domain (e.g., mystore.myshopify.com)" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/onedrive/authorize": { + "get": { + "tags": [ + "onedrive-oauth" + ], + "summary": "Authorize OneDrive", + "operationId": "authorizeOneDrive", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/onedrive/refresh_token": { + "post": { + "tags": [ + "onedrive-oauth" + ], + "summary": "Refresh Token", + "operationId": "refresh_token_oauth_onedrive_refresh_token_post", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Instance ID for which to refresh the token", + "title": "Instance Id" + }, + "description": "Instance ID for which to refresh the token" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AzureADOAuthSuccessResponse" + } + } + } + }, + "400": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AzureADOAuthErrorResponse" + } + } + }, + "description": "Bad Request" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/outlook/authorize": { + "get": { + "tags": [ + "outlook-oauth" + ], + "summary": "Authorize Outlook", + "operationId": "authorizeOutlook", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/outlook/refresh_token": { + "post": { + "tags": [ + "outlook-oauth" + ], + "summary": "Refresh Token", + "operationId": "refresh_token_oauth_outlook_refresh_token_post", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Instance ID for which to refresh the token", + "title": "Instance Id" + }, + "description": "Instance ID for which to refresh the token" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AzureADOAuthSuccessResponse" + } + } + } + }, + "400": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AzureADOAuthErrorResponse" + } + } + }, + "description": "Bad Request" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/teams/authorize": { + "get": { + "tags": [ + "teams-oauth" + ], + "summary": "Authorize Teams", + "operationId": "authorizeTeams", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/teams/refresh_token": { + "post": { + "tags": [ + "teams-oauth" + ], + "summary": "Refresh Token", + "operationId": "refresh_token_oauth_teams_refresh_token_post", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Instance ID for which to refresh the token", + "title": "Instance Id" + }, + "description": "Instance ID for which to refresh the token" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AzureADOAuthSuccessResponse" + } + } + } + }, + "400": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AzureADOAuthErrorResponse" + } + } + }, + "description": "Bad Request" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/fathom/authorize": { + "get": { + "tags": [ + "fathom-oauth" + ], + "summary": "Authorize Fathom", + "description": "Start Fathom OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeFathom", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/oauth/monday/authorize": { + "get": { + "tags": [ + "monday-oauth" + ], + "summary": "Authorize Monday", + "description": "Start Monday OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (space-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeMonday", + "parameters": [ + { + "name": "instance_id", + "in": "query", + "required": true, + "schema": { + "type": "string", + "description": "Unique identifier for the client instance requesting authorization", + "title": "Instance Id" + }, + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (space-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (space-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + } + }, + "components": { + "schemas": { + "ApiKeyAuth": { + "properties": { + "token": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Token", + "description": "The API key to save (token field)" + }, + "api_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Api Key", + "description": "The API key to save (api_key field)" + } + }, + "additionalProperties": false, + "type": "object", + "title": "Option 1: API Key" + }, + "AzureADOAuthErrorResponse": { + "properties": { + "error": { + "type": "string", + "title": "Error", + "description": "Error message from the OAuth process" + }, + "message": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Message", + "description": "Additional error message" + } + }, + "type": "object", + "required": [ + "error" + ], + "title": "AzureADOAuthErrorResponse" + }, + "AzureADOAuthSuccessResponse": { + "properties": { + "status": { + "type": "string", + "title": "Status", + "description": "Status of the OAuth process", + "default": "success" + }, + "message": { + "type": "string", + "title": "Message", + "description": "Success message" + }, + "data": { + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Data", + "description": "Additional data related to the response" + } + }, + "type": "object", + "required": [ + "message" + ], + "title": "AzureADOAuthSuccessResponse" + }, + "CallToolRequest": { + "properties": { + "serverUrl": { + "type": "string", + "title": "Serverurl", + "description": "The full URL for connecting to the MCP server" + }, + "toolName": { + "type": "string", + "title": "Toolname", + "description": "The name of the tool to call" + }, + "toolArgs": { + "additionalProperties": true, + "type": "object", + "title": "Toolargs", + "description": "The input parameters for the tool" + }, + "connectionType": { + "$ref": "#/components/schemas/ConnectionType", + "description": "The connection type to use for the MCP server. Default is STREAMABLE_HTTP.", + "default": "StreamableHttp" + }, + "headers": { + "anyOf": [ + { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Headers", + "description": "Optional HTTP headers to include when connecting to the server" + } + }, + "type": "object", + "required": [ + "serverUrl", + "toolName" + ], + "title": "CallToolRequest" + }, + "CallToolResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success", + "description": "Whether the API call was successful" + }, + "result": { + "anyOf": [ + { + "$ref": "#/components/schemas/CallToolResult" + }, + { + "type": "null" + } + ], + "description": "The result of the tool call, if successful" + }, + "error": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Error", + "description": "Error message, if the tool call failed" + } + }, + "type": "object", + "required": [ + "success" + ], + "title": "CallToolResponse" + }, + "CallToolResult": { + "properties": { + "content": { + "items": {}, + "type": "array", + "title": "Content", + "description": "The content of the tool call" + }, + "isError": { + "type": "boolean", + "title": "Iserror", + "description": "Whether the tool call was successful", + "default": false + } + }, + "type": "object", + "required": [ + "content" + ], + "title": "CallToolResult", + "description": "The server's response to a tool call." + }, + "ConnectionType": { + "type": "string", + "enum": [ + "SSE", + "StreamableHttp" + ], + "title": "ConnectionType" + }, + "CreateSelfHostedServerRequest": { + "properties": { + "serverName": { + "$ref": "#/components/schemas/McpServerName", + "description": "The name of the target MCP server. Case-insensitive (e.g., 'google calendar', 'GOOGLE_CALENDAR', 'Google Calendar' are all valid)." + }, + "userId": { + "type": "string", + "minLength": 1, + "title": "Userid", + "description": "The unique identifier for the user. The server instance along with the all the authentication data will belong to that specific user only. It can be a UUID from the database, a unique email address from the user, etc." + } + }, + "type": "object", + "required": [ + "serverName", + "userId" + ], + "title": "CreateSelfHostedServerRequest" + }, + "CreateSelfHostedServerResponse": { + "properties": { + "instanceId": { + "type": "string", + "title": "Instanceid", + "description": "The unique identifier for this specific server connection integration instance." + }, + "oauthUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Oauthurl", + "description": "The OAuth authorization URL for the specified server, if OAuth is configured." + } + }, + "type": "object", + "required": [ + "instanceId" + ], + "title": "CreateSelfHostedServerResponse" + }, + "CreateServerRequest": { + "properties": { + "serverName": { + "$ref": "#/components/schemas/McpServerName", + "description": "The name of the target MCP server. Case-insensitive (e.g., 'google calendar', 'GOOGLE_CALENDAR', 'Google Calendar' are all valid)." + }, + "userId": { + "type": "string", + "minLength": 1, + "title": "Userid", + "description": "The unique identifier for the user. The server instance along with the all the authentication data will belong to that specific user only. It can be a UUID from the database, a unique email address from the user, etc." + }, + "platformName": { + "anyOf": [ + { + "type": "string", + "minLength": 1 + }, + { + "type": "null" + } + ], + "title": "Platformname", + "description": "The name of the platform associated with the user. Optional." + }, + "connectionType": { + "$ref": "#/components/schemas/ConnectionType", + "description": "The connection type to use for the MCP server. Default is STREAMABLE_HTTP.", + "default": "StreamableHttp" + }, + "legacy": { + "type": "boolean", + "title": "Legacy", + "description": "Whether to use the legacy server. Default is False.", + "default": false + } + }, + "type": "object", + "required": [ + "serverName", + "userId" + ], + "title": "CreateServerRequest" + }, + "CreateServerResponse": { + "properties": { + "serverUrl": { + "type": "string", + "title": "Serverurl", + "description": "The full URL for connecting to the MCP server, including the instance ID." + }, + "instanceId": { + "type": "string", + "title": "Instanceid", + "description": "The unique identifier for this specific server connection integration instance." + }, + "oauthUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Oauthurl", + "description": "The OAuth URL for authentication if available, supports white label if configured." + } + }, + "type": "object", + "required": [ + "serverUrl", + "instanceId" + ], + "title": "CreateServerResponse" + }, + "CreateWhiteLabelingRequest": { + "properties": { + "client_id": { + "type": "string", + "title": "Client Id", + "description": "OAuth client ID" + }, + "client_secret": { + "type": "string", + "title": "Client Secret", + "description": "OAuth client secret" + }, + "server_name": { + "$ref": "#/components/schemas/OAuthServerName", + "description": "Optional. The name of the server" + }, + "callback_url": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Callback Url", + "description": "Optional. OAuth callback URL" + }, + "account_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Account Id", + "description": "Optional. The UUID of the account" + } + }, + "type": "object", + "required": [ + "client_id", + "client_secret", + "server_name" + ], + "title": "CreateWhiteLabelingRequest" + }, + "DeleteUserResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "message": { + "type": "string", + "title": "Message" + } + }, + "type": "object", + "required": [ + "success", + "message" + ], + "title": "DeleteUserResponse" + }, + "ExternalServerInfo": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "The name of the external server" + }, + "url": { + "type": "string", + "title": "Url", + "description": "The URL of the external MCP server" + }, + "headers": { + "anyOf": [ + { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Headers", + "description": "Optional HTTP headers used when connecting to the external server" + } + }, + "type": "object", + "required": [ + "name", + "url" + ], + "title": "ExternalServerInfo" + }, + "ExternalServerRequest": { + "properties": { + "name": { + "type": "string", + "minLength": 1, + "title": "Name", + "description": "The name of the external server" + }, + "url": { + "type": "string", + "minLength": 1, + "title": "Url", + "description": "The URL of the external MCP server" + }, + "headers": { + "anyOf": [ + { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Headers", + "description": "Optional HTTP headers to include when connecting to the external server (e.g., for bearer authentication)" + } + }, + "type": "object", + "required": [ + "name", + "url" + ], + "title": "ExternalServerRequest" + }, + "GeneralAuth": { + "properties": { + "data": { + "additionalProperties": true, + "type": "object", + "title": "Data", + "description": "Any other general authentication data to save" + } + }, + "additionalProperties": false, + "type": "object", + "title": "Option 2: General Auth Data" + }, + "GetAllUsersResponse": { + "properties": { + "users": { + "items": { + "$ref": "#/components/schemas/UserInfo" + }, + "type": "array", + "title": "Users", + "description": "List of users" + }, + "totalCount": { + "type": "integer", + "title": "Totalcount", + "description": "Total number of users across all pages" + }, + "page": { + "type": "integer", + "title": "Page", + "description": "Current page number" + }, + "pageSize": { + "type": "integer", + "title": "Pagesize", + "description": "Number of results per page" + }, + "totalPages": { + "type": "integer", + "title": "Totalpages", + "description": "Total number of pages" + } + }, + "type": "object", + "required": [ + "users", + "totalCount", + "page", + "pageSize", + "totalPages" + ], + "title": "GetAllUsersResponse" + }, + "GetAuthDataResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success", + "description": "Whether the request was successful" + }, + "authData": { + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "GetAuthDataResponseAuthData", + "description": "Complete authentication data including access token, refresh token, scope, expiration, and platform-specific data" + }, + "error": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Error", + "description": "Error message if the request failed" + } + }, + "type": "object", + "required": [ + "success" + ], + "title": "GetAuthDataResponse" + }, + "GetInstanceResponse": { + "properties": { + "instanceId": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Instanceid", + "description": "The unique identifier of the connection integration instance." + }, + "authNeeded": { + "type": "boolean", + "title": "Authneeded", + "description": "Indicates whether authentication is required for this server instance.", + "default": false + }, + "isAuthenticated": { + "type": "boolean", + "title": "Isauthenticated", + "description": "Indicates whether the instance is authenticated successfully.", + "default": false + }, + "serverName": { + "type": "string", + "title": "Servername", + "description": "The name of the MCP server associated with the instance.", + "default": "" + }, + "platform": { + "type": "string", + "title": "Platform", + "description": "The platform associated with the instance.", + "default": "" + }, + "externalUserId": { + "type": "string", + "title": "Externaluserid", + "description": "The user's identifier on the external platform.", + "default": "" + }, + "oauthUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Oauthurl", + "description": "The OAuth URL for authentication if available, supports white label if configured." + } + }, + "type": "object", + "title": "GetInstanceResponse" + }, + "GetMcpServersResponse": { + "properties": { + "servers": { + "items": { + "$ref": "#/components/schemas/McpServer" + }, + "type": "array", + "title": "Servers" + } + }, + "type": "object", + "required": [ + "servers" + ], + "title": "GetMcpServersResponse" + }, + "GetUserAuthResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "userId": { + "type": "string", + "title": "Userid", + "description": "The user identifier" + }, + "serverName": { + "type": "string", + "title": "Servername", + "description": "The name of the server" + }, + "authData": { + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "GetUserAuthResponseAuthData", + "description": "The authentication data if available" + }, + "isAuthenticated": { + "type": "boolean", + "title": "Isauthenticated", + "description": "Whether the server has authentication data configured" + }, + "message": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Message" + } + }, + "type": "object", + "required": [ + "success", + "userId", + "serverName", + "isAuthenticated" + ], + "title": "GetUserAuthResponse" + }, + "GetUserIntegrationsResponse": { + "properties": { + "integrations": { + "items": { + "$ref": "#/components/schemas/McpServerName" + }, + "type": "array", + "title": "Integrations" + } + }, + "type": "object", + "required": [ + "integrations" + ], + "title": "GetUserIntegrationsResponse" + }, + "GetUserResponse": { + "properties": { + "userId": { + "type": "string", + "title": "Userid" + }, + "createdAt": { + "type": "string", + "title": "Createdat" + }, + "lastUsedAt": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Lastusedat" + } + }, + "type": "object", + "required": [ + "userId", + "createdAt" + ], + "title": "GetUserResponse" + }, + "HTTPValidationError": { + "properties": { + "detail": { + "items": { + "$ref": "#/components/schemas/ValidationError" + }, + "type": "array", + "title": "Detail" + } + }, + "type": "object", + "title": "HTTPValidationError" + }, + "ListToolsRequest": { + "properties": { + "serverUrl": { + "type": "string", + "title": "Serverurl", + "description": "The full URL for connecting to the MCP server" + }, + "connectionType": { + "$ref": "#/components/schemas/ConnectionType", + "description": "The connection type to use for the MCP server. Default is STREAMABLE_HTTP.", + "default": "StreamableHttp" + }, + "format": { + "$ref": "#/components/schemas/ToolFormat", + "description": "The format to return tools in. Default is MCP Native format for maximum compatibility.", + "default": "mcp_native" + }, + "headers": { + "anyOf": [ + { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Headers", + "description": "Optional HTTP headers to include when connecting to the server" + } + }, + "type": "object", + "required": [ + "serverUrl" + ], + "title": "ListToolsRequest" + }, + "ListToolsResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success", + "description": "Whether the list tools request was successful" + }, + "tools": { + "anyOf": [ + { + "items": {}, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Tools", + "description": "List of tools in the requested format" + }, + "format": { + "$ref": "#/components/schemas/ToolFormat", + "description": "The format of the returned tools" + }, + "error": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Error", + "description": "Error message, if the request failed" + } + }, + "type": "object", + "required": [ + "success", + "format" + ], + "title": "ListToolsResponse" + }, + "McpServer": { + "properties": { + "id": { + "type": "string", + "format": "uuid", + "title": "Id" + }, + "name": { + "type": "string", + "title": "Name" + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Description" + }, + "tools": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/ServerTool" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Tools" + }, + "authNeeded": { + "type": "boolean", + "title": "Authneeded", + "default": true + } + }, + "type": "object", + "required": [ + "id", + "name" + ], + "title": "McpServer" + }, + "McpServerName": { + "type": "string", + "enum": [ + "Affinity", + "Airtable", + "Asana", + "Attio", + "Box", + "Brave Search", + "Cal.com", + "ClickUp", + "Close", + "Cloudflare", + "Coinbase", + "Confluence", + "Dialpad", + "Discord", + "Doc2markdown", + "DocuSign", + "Dropbox", + "ElevenLabs", + "Exa", + "Fathom", + "Figma", + "Fireflies", + "Firecrawl Deep Research", + "Firecrawl Web Search", + "Freshdesk", + "GitHub", + "GitLab", + "Gmail", + "Gong", + "Google Calendar", + "Google Docs", + "Google Drive", + "Google Jobs", + "Google Sheets", + "Hacker News", + "Heygen", + "HubSpot", + "Jira", + "Klaviyo", + "Klavis ReportGen", + "Linear", + "LinkedIn", + "Markdown2doc", + "Mem0", + "Microsoft Teams", + "Mixpanel", + "Monday", + "Moneybird", + "Motion", + "Notion", + "OneDrive", + "Openrouter", + "Outlook", + "PagerDuty", + "Pipedrive", + "Plai", + "Postgres", + "PostHog", + "QuickBooks", + "Resend", + "Salesforce", + "SendGrid", + "Shopify", + "Slack", + "Stripe", + "Supabase", + "Tavily", + "Vercel", + "WhatsApp", + "WordPress", + "YouTube", + "Zendesk", + "ServiceNow" + ], + "title": "McpServerName" + }, + "OAuthServerName": { + "type": "string", + "enum": [ + "Airtable", + "Asana", + "Attio", + "Box", + "Cal.com", + "Canva", + "ClickUp", + "Close", + "Confluence", + "Dialpad", + "DocuSign", + "Dropbox", + "Fathom", + "Figma", + "GitHub", + "GitLab", + "Gmail", + "Google Calendar", + "Google Docs", + "Google Drive", + "Google Sheets", + "HubSpot", + "Jira", + "Klaviyo", + "Linear", + "LinkedIn", + "Microsoft Teams", + "Monday", + "Moneybird", + "Notion", + "Onedrive", + "Outlook", + "PagerDuty", + "Pipedrive", + "QuickBooks", + "Salesforce", + "Shopify", + "Slack", + "Stripe", + "Supabase", + "Vercel", + "WordPress", + "Xero", + "Zendesk" + ], + "title": "OAuthServerName" + }, + "ServerTool": { + "properties": { + "name": { + "type": "string", + "title": "Name" + }, + "description": { + "type": "string", + "title": "Description" + } + }, + "type": "object", + "required": [ + "name", + "description" + ], + "title": "ServerTool" + }, + "SetAuthRequest": { + "properties": { + "instanceId": { + "type": "string", + "title": "Instanceid", + "description": "The unique identifier for the connection instance" + }, + "authData": { + "anyOf": [ + { + "$ref": "#/components/schemas/ApiKeyAuth" + }, + { + "$ref": "#/components/schemas/GeneralAuth" + } + ], + "title": "SetAuthRequestAuthData", + "description": "Authentication data" + } + }, + "type": "object", + "required": [ + "instanceId", + "authData" + ], + "title": "SetAuthRequest" + }, + "SetUserAuthRequest": { + "properties": { + "userId": { + "type": "string", + "minLength": 1, + "title": "Userid", + "description": "The unique identifier for the user" + }, + "serverName": { + "$ref": "#/components/schemas/McpServerName", + "description": "The name of the MCP server to set authentication for (e.g., 'GitHub', 'Jira')" + }, + "authData": { + "anyOf": [ + { + "$ref": "#/components/schemas/ApiKeyAuth" + }, + { + "$ref": "#/components/schemas/GeneralAuth" + } + ], + "title": "SetUserAuthRequestAuthData", + "description": "Authentication data" + } + }, + "type": "object", + "required": [ + "userId", + "serverName", + "authData" + ], + "title": "SetUserAuthRequest" + }, + "StatusResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "message": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Message" + } + }, + "type": "object", + "required": [ + "success" + ], + "title": "StatusResponse" + }, + "StrataAddServersRequest": { + "properties": { + "strataId": { + "type": "string", + "minLength": 1, + "title": "Strataid", + "description": "The strata server ID" + }, + "servers": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/McpServerName" + }, + "type": "array" + }, + { + "type": "string", + "const": "ALL" + }, + { + "type": "null" + } + ], + "title": "Servers", + "description": "List of Klavis integration to add (e.g., 'jira', 'linear'), 'ALL' to add all Klavis integration, or null to add no servers." + }, + "externalServers": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/ExternalServerRequest" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Externalservers", + "description": "Optional list of external MCP servers to add with their URLs. Each server will be validated before being added." + } + }, + "type": "object", + "required": [ + "strataId" + ], + "title": "StrataAddServersRequest" + }, + "StrataAddServersResponse": { + "properties": { + "addedServers": { + "items": { + "type": "string" + }, + "type": "array", + "title": "Addedservers", + "description": "List of Klavis integration that were added" + }, + "addedExternalServers": { + "items": { + "$ref": "#/components/schemas/ExternalServerInfo" + }, + "type": "array", + "title": "Addedexternalservers", + "description": "List of external MCP servers that were added with name and URL" + }, + "oauthUrls": { + "additionalProperties": { + "type": "string" + }, + "type": "object", + "title": "Oauthurls", + "description": "Map of connected integration to OAuth URL, supports white labeling if configured" + }, + "apiKeyUrls": { + "additionalProperties": { + "type": "string" + }, + "type": "object", + "title": "Apikeyurls", + "description": "Map of connected integration to API key setup URL" + } + }, + "type": "object", + "required": [ + "addedServers" + ], + "title": "StrataAddServersResponse" + }, + "StrataCreateRequest": { + "properties": { + "userId": { + "type": "string", + "minLength": 1, + "title": "Userid", + "description": "The unique identifier for the user. The server instance along with the all the authentication data will belong to that specific user only. It can be a UUID from the database, a unique email address from the user, etc." + }, + "servers": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/McpServerName" + }, + "type": "array" + }, + { + "type": "string", + "const": "ALL" + }, + { + "type": "null" + } + ], + "title": "Servers", + "description": "List of Klavis MCP servers to enable (e.g., 'jira', 'linear'), 'ALL' to add all Klavis MCP servers, or null to add no servers." + }, + "externalServers": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/ExternalServerRequest" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Externalservers", + "description": "Optional list of external MCP servers to add with their URLs. Each server will be validated before being added." + }, + "enableAuthHandling": { + "type": "boolean", + "title": "Enableauthhandling", + "description": "Whether to enable authentication handling. Default is True.", + "default": true + } + }, + "type": "object", + "required": [ + "userId" + ], + "title": "StrataCreateRequest" + }, + "StrataCreateResponse": { + "properties": { + "strataServerUrl": { + "type": "string", + "title": "Strataserverurl", + "description": "URL to connect to the Strata MCP server" + }, + "strataId": { + "type": "string", + "title": "Strataid", + "description": "The strata server ID" + }, + "addedServers": { + "items": { + "type": "string" + }, + "type": "array", + "title": "Addedservers", + "description": "List of Klavis integration that were added" + }, + "addedExternalServers": { + "items": { + "$ref": "#/components/schemas/ExternalServerInfo" + }, + "type": "array", + "title": "Addedexternalservers", + "description": "List of external MCP servers that were added with name and URL" + }, + "oauthUrls": { + "additionalProperties": { + "type": "string" + }, + "type": "object", + "title": "Oauthurls", + "description": "Map of connected integration to OAuth URL, supports white labeling if configured" + }, + "apiKeyUrls": { + "additionalProperties": { + "type": "string" + }, + "type": "object", + "title": "Apikeyurls", + "description": "Map of connected integration to API key setup URL" + } + }, + "type": "object", + "required": [ + "strataServerUrl", + "strataId", + "addedServers" + ], + "title": "StrataCreateResponse" + }, + "StrataDeleteServersResponse": { + "properties": { + "deletedServers": { + "items": { + "type": "string" + }, + "type": "array", + "title": "Deletedservers", + "description": "List of integration that were deleted" + }, + "deletedExternalServers": { + "items": { + "type": "string" + }, + "type": "array", + "title": "Deletedexternalservers", + "description": "List of external MCP server names that were deleted" + } + }, + "type": "object", + "required": [ + "deletedServers", + "deletedExternalServers" + ], + "title": "StrataDeleteServersResponse" + }, + "StrataGetAuthResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "serverName": { + "type": "string", + "title": "Servername", + "description": "The name of the server" + }, + "authData": { + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Authdata", + "description": "The authentication data if available" + }, + "isAuthenticated": { + "type": "boolean", + "title": "Isauthenticated", + "description": "Whether the server has authentication data configured" + }, + "message": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Message" + } + }, + "type": "object", + "required": [ + "success", + "serverName", + "isAuthenticated" + ], + "title": "StrataGetAuthResponse" + }, + "StrataGetResponse": { + "properties": { + "strataServerUrl": { + "type": "string", + "title": "Strataserverurl", + "description": "URL to connect to the Strata MCP server" + }, + "strataId": { + "type": "string", + "title": "Strataid", + "description": "The strata server ID" + }, + "connectedServers": { + "items": { + "type": "string" + }, + "type": "array", + "title": "Connectedservers", + "description": "List of available integration currently connected to this strata" + }, + "connectedExternalServers": { + "items": { + "$ref": "#/components/schemas/ExternalServerInfo" + }, + "type": "array", + "title": "Connectedexternalservers", + "description": "List of external servers with name and URL currently connected to this strata" + }, + "oauthUrls": { + "additionalProperties": { + "type": "string" + }, + "type": "object", + "title": "Oauthurls", + "description": "Map of connected integration to OAuth URL, supports white labeling if configured" + }, + "apiKeyUrls": { + "additionalProperties": { + "type": "string" + }, + "type": "object", + "title": "Apikeyurls", + "description": "Map of connected integration to API key setup URL" + } + }, + "type": "object", + "required": [ + "strataServerUrl", + "strataId", + "connectedServers", + "connectedExternalServers" + ], + "title": "StrataGetResponse" + }, + "StrataRawActionsResponse": { + "properties": { + "strataId": { + "type": "string", + "title": "Strataid", + "description": "The strata server ID" + }, + "server": { + "type": "string", + "title": "Server", + "description": "The requested server name" + }, + "result": { + "additionalProperties": true, + "type": "object", + "title": "Result", + "description": "Map of categories to raw actions" + } + }, + "type": "object", + "required": [ + "strataId", + "server" + ], + "title": "StrataRawActionsResponse" + }, + "StrataSetAuthRequest": { + "properties": { + "strataId": { + "type": "string", + "title": "Strataid", + "description": "The strata server ID" + }, + "serverName": { + "$ref": "#/components/schemas/McpServerName", + "description": "The name of the Klavis MCP server to set authentication for (e.g., 'GitHub', 'Jira')" + }, + "authData": { + "anyOf": [ + { + "$ref": "#/components/schemas/ApiKeyAuth" + }, + { + "$ref": "#/components/schemas/GeneralAuth" + } + ], + "title": "Authdata", + "description": "Authentication data" + } + }, + "type": "object", + "required": [ + "strataId", + "serverName", + "authData" + ], + "title": "StrataSetAuthRequest" + }, + "ToolFormat": { + "type": "string", + "enum": [ + "openai", + "anthropic", + "gemini", + "mcp_native" + ], + "title": "ToolFormat" + }, + "UserInfo": { + "properties": { + "userId": { + "type": "string", + "title": "Userid", + "description": "The external user ID" + }, + "createdAt": { + "type": "string", + "title": "Createdat", + "description": "The timestamp when the user was created" + } + }, + "type": "object", + "required": [ + "userId", + "createdAt" + ], + "title": "UserInfo" + }, + "ValidationError": { + "properties": { + "loc": { + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + } + ] + }, + "type": "array", + "title": "Location" + }, + "msg": { + "type": "string", + "title": "Message" + }, + "type": { + "type": "string", + "title": "Error Type" + } + }, + "type": "object", + "required": [ + "loc", + "msg", + "type" + ], + "title": "ValidationError" + }, + "WhiteLabelingResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success", + "description": "Whether the operation was successful" + }, + "data": { + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Data", + "description": "The white labeling data if successful" + }, + "message": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Message", + "description": "Error message if unsuccessful" + } + }, + "type": "object", + "required": [ + "success" + ], + "title": "WhiteLabelingResponse" + } + }, + "securitySchemes": { + "HTTPBearer": { + "type": "http", + "description": "Your Klavis AI API key.", + "scheme": "bearer", + "x-fern-bearer": { + "name": "api_key" + } + } + } + } +} \ No newline at end of file diff --git a/docs/api-reference/strata/add.mdx b/docs/api-reference/strata/add.mdx new file mode 100644 index 00000000..46268481 --- /dev/null +++ b/docs/api-reference/strata/add.mdx @@ -0,0 +1,5 @@ +--- +openapi: post /mcp-server/strata/add +--- + + diff --git a/docs/api-reference/strata/create.mdx b/docs/api-reference/strata/create.mdx new file mode 100644 index 00000000..29173691 --- /dev/null +++ b/docs/api-reference/strata/create.mdx @@ -0,0 +1,5 @@ +--- +openapi: post /mcp-server/strata/create +--- + + diff --git a/docs/api-reference/strata/delete-strata-auth.mdx b/docs/api-reference/strata/delete-strata-auth.mdx new file mode 100644 index 00000000..168ca4db --- /dev/null +++ b/docs/api-reference/strata/delete-strata-auth.mdx @@ -0,0 +1,4 @@ +--- +openapi: delete /mcp-server/strata/{strataId}/auth/{serverName} +--- + diff --git a/docs/api-reference/strata/delete.mdx b/docs/api-reference/strata/delete.mdx new file mode 100644 index 00000000..50a090c3 --- /dev/null +++ b/docs/api-reference/strata/delete.mdx @@ -0,0 +1,4 @@ +--- +openapi: delete /mcp-server/strata/{strataId}/servers +--- + diff --git a/docs/api-reference/strata/get-strata-auth.mdx b/docs/api-reference/strata/get-strata-auth.mdx new file mode 100644 index 00000000..a7bf92e2 --- /dev/null +++ b/docs/api-reference/strata/get-strata-auth.mdx @@ -0,0 +1,4 @@ +--- +openapi: get /mcp-server/strata/{strataId}/auth/{serverName} +--- + diff --git a/docs/api-reference/strata/get.mdx b/docs/api-reference/strata/get.mdx new file mode 100644 index 00000000..2f991b23 --- /dev/null +++ b/docs/api-reference/strata/get.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /mcp-server/strata/{strataId} +--- diff --git a/docs/api-reference/strata/list-raw-actions.mdx b/docs/api-reference/strata/list-raw-actions.mdx new file mode 100644 index 00000000..5bfb041f --- /dev/null +++ b/docs/api-reference/strata/list-raw-actions.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /mcp-server/strata/{strataId}/raw-actions +--- \ No newline at end of file diff --git a/docs/api-reference/strata/set-strata-auth.mdx b/docs/api-reference/strata/set-strata-auth.mdx new file mode 100644 index 00000000..2c56ade8 --- /dev/null +++ b/docs/api-reference/strata/set-strata-auth.mdx @@ -0,0 +1,3 @@ +--- +openapi: post /mcp-server/strata/set-auth +--- \ No newline at end of file diff --git a/docs/api-reference/user/delete-user-auth.mdx b/docs/api-reference/user/delete-user-auth.mdx new file mode 100644 index 00000000..1d06bf7a --- /dev/null +++ b/docs/api-reference/user/delete-user-auth.mdx @@ -0,0 +1,4 @@ +--- +openapi: delete /user/{userId}/auth/{serverName} +--- + diff --git a/docs/api-reference/user/delete-user.mdx b/docs/api-reference/user/delete-user.mdx new file mode 100644 index 00000000..f55faf78 --- /dev/null +++ b/docs/api-reference/user/delete-user.mdx @@ -0,0 +1,5 @@ +--- +openapi: delete /user/{userId} +--- + + diff --git a/docs/api-reference/user/get-all-users.mdx b/docs/api-reference/user/get-all-users.mdx new file mode 100644 index 00000000..8488054b --- /dev/null +++ b/docs/api-reference/user/get-all-users.mdx @@ -0,0 +1,4 @@ +--- +openapi: get /user/ +--- + diff --git a/docs/api-reference/user/get-user-auth.mdx b/docs/api-reference/user/get-user-auth.mdx new file mode 100644 index 00000000..d1036ceb --- /dev/null +++ b/docs/api-reference/user/get-user-auth.mdx @@ -0,0 +1,5 @@ +--- +openapi: get /user/{userId}/auth/{serverName} +--- + + diff --git a/docs/api-reference/user/get-user-integrations.mdx b/docs/api-reference/user/get-user-integrations.mdx new file mode 100644 index 00000000..9897a1b3 --- /dev/null +++ b/docs/api-reference/user/get-user-integrations.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /user/{userId}/integrations +--- \ No newline at end of file diff --git a/docs/api-reference/user/get-user.mdx b/docs/api-reference/user/get-user.mdx new file mode 100644 index 00000000..0378afe1 --- /dev/null +++ b/docs/api-reference/user/get-user.mdx @@ -0,0 +1,4 @@ +--- +openapi: get /user/{userId} +--- + diff --git a/docs/api-reference/user/set-user-auth.mdx b/docs/api-reference/user/set-user-auth.mdx new file mode 100644 index 00000000..484c86c5 --- /dev/null +++ b/docs/api-reference/user/set-user-auth.mdx @@ -0,0 +1,4 @@ +--- +openapi: post /user/set-auth +--- + diff --git a/docs/api-reference/white-labeling/create.mdx b/docs/api-reference/white-labeling/create.mdx new file mode 100644 index 00000000..3267e693 --- /dev/null +++ b/docs/api-reference/white-labeling/create.mdx @@ -0,0 +1,3 @@ +--- +openapi: post /white-labeling/create +--- \ No newline at end of file diff --git a/docs/api-reference/white-labeling/get.mdx b/docs/api-reference/white-labeling/get.mdx new file mode 100644 index 00000000..3deb0ebd --- /dev/null +++ b/docs/api-reference/white-labeling/get.mdx @@ -0,0 +1,3 @@ +--- +openapi: get /white-labeling/get/{client_id} +--- \ No newline at end of file diff --git a/docs/auth/api-key.mdx b/docs/auth/api-key.mdx new file mode 100644 index 00000000..5611bd46 --- /dev/null +++ b/docs/auth/api-key.mdx @@ -0,0 +1,14 @@ +--- +title: 'API Key' +description: +--- + +## MCP Servers that require API Key Authentication + +The following MCP servers use API key authentication and do not support OAuth: + +- **Cloudflare** - Requires Cloudflare API token +- **Discord** - Requires Discord bot token +- **ElevenLabs** - Requires ElevenLabs API key +- **PostHog** - Requires PostHog API key +- **SendGrid** - Requires SendGrid API key \ No newline at end of file diff --git a/docs/auth/oauth.mdx b/docs/auth/oauth.mdx new file mode 100644 index 00000000..d8ea39ce --- /dev/null +++ b/docs/auth/oauth.mdx @@ -0,0 +1,56 @@ +--- +title: 'OAuth' +description: +--- + + +## What is OAuth? + +OAuth (Open Authorization) is an open standard protocol that allows third-party applications to access resources on behalf of users without exposing their credentials. Klavis AI implements OAuth 2.0 to securely connect with services like GitHub, Slack, Gmail, Notion, and more. + + +## MCP Servers that support OAuth Authentication + + +For detailed OAuth scope and how to create your own OAuth app, see our [OAuth knowledge](/knowledge-base/oauth_app/oauth-scopes). + + +The following MCP servers support OAuth authentication: + +- **Airtable** - OAuth 2.0 integration +- **Asana** - OAuth 2.0 integration +- **Attio** - OAuth 2.0 integration +- **Box** - OAuth 2.0 integration +- **Cal.com** - OAuth 2.0 integration +- **Canva** - OAuth 2.0 integration +- **ClickUp** - OAuth 2.0 integration +- **Close** - OAuth 2.0 integration +- **Confluence** - OAuth 2.0 integration +- **Dialpad** - OAuth 2.0 integration +- **DocuSign** - OAuth 2.0 integration +- **Dropbox** - OAuth 2.0 integration +- **Figma** - OAuth 2.0 integration +- **GitHub** - OAuth 2.0 integration +- **GitLab** - OAuth 2.0 integration +- **Gmail** - OAuth 2.0 integration +- **Google Calendar** - OAuth 2.0 integration +- **Google Docs** - OAuth 2.0 integration +- **Google Drive** - OAuth 2.0 integration +- **Google Sheets** - OAuth 2.0 integration +- **HubSpot** - OAuth 2.0 integration +- **Jira** - OAuth 2.0 integration +- **Klaviyo** - OAuth 2.0 integration +- **Linear** - OAuth 2.0 integration +- **LinkedIn** - OAuth 2.0 integration +- **Notion** - OAuth 2.0 integration +- **PagerDuty** - OAuth 2.0 integration +- **Pipedrive** - OAuth 2.0 integration +- **QuickBooks** - OAuth 2.0 integration +- **Salesforce** - OAuth 2.0 integration +- **Slack** - OAuth 2.0 integration +- **Stripe Connect** - OAuth 2.0 integration +- **Supabase** - OAuth 2.0 integration +- **Vercel** - OAuth 2.0 integration +- **WordPress** - OAuth 2.0 integration +- **Xero** - OAuth 2.0 integration +- **Zendesk** - OAuth 2.0 integration \ No newline at end of file diff --git a/docs/auth/white-label.mdx b/docs/auth/white-label.mdx new file mode 100644 index 00000000..b70a4f38 --- /dev/null +++ b/docs/auth/white-label.mdx @@ -0,0 +1,161 @@ +--- +title: 'White-label' +description: 'White-label allows you to integrate our OAuth flows with your own branding and custom OAuth applications' +--- + + +For detailed OAuth scope and how to create your own OAuth app, see our [OAuth knowledge](/knowledge-base/oauth_app/oauth-scopes). + + +## What is White-label? + +White-label allows you to customize the authentication experience with your own branding. When enabled, users will see your application name, logo, and other brand elements during the OAuth flow instead of Klavis AI's. + + + White-label OAuth consent screen example + + +## OAuth Flow (w/ white-label) + +```mermaid +sequenceDiagram + actor EndUser + participant ClientApp as Your App + participant KlavisAI as Klavis AI + participant ThirdPartyIdP as Third-party App + + %% --- OAuth Flow Initiation --- + EndUser->>ClientApp: 1. Initiates action in Your App
(e.g., "Connect my Account"). + ClientApp->>KlavisAI: 2. Requests Klavis AI
to start OAuth flow. + + activate KlavisAI + Note over KlavisAI: Klavis AI retrieves the Your App's Config + KlavisAI->>EndUser: 3. Redirects End-User's browser to
Third-party authorization endpoint. + deactivate KlavisAI + activate EndUser + + EndUser->>ThirdPartyIdP: 4. Browser navigates to Third-party authorization URL. + activate ThirdPartyIdP + ThirdPartyIdP-->>EndUser: 5. Third-party App presents its Login & Consent screen with Your App Info and Logo. + + EndUser->>ThirdPartyIdP: 6. End-User authenticates with Third-party App & grants consent. + deactivate EndUser + ThirdPartyIdP-->>KlavisAI: 7. Third-party App redirects
Klavis AI callback URI. + deactivate ThirdPartyIdP + + %% --- Token Exchange (Klavis AI with Third-party) --- + KlavisAI->>ThirdPartyIdP: 8. Klavis AI exchanges
Authorization Code for Access Token. + activate ThirdPartyIdP + ThirdPartyIdP-->>KlavisAI: 9. Third-party returns
Access/Refresh Token to Klavis AI. + deactivate ThirdPartyIdP + + KlavisAI->>EndUser: 10. OAuth White Labeling succeeds. +``` + + +## Implementation + +### Setting Up White-label + +To set up white-label for your OAuth integrations: + + + + Register your application with the third-party service (GitHub, Slack, etc.) to obtain your client ID and client secret. + + + Go to the Klavis AI white label configuration page: + + [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) + + + White Label Configuration UI + + + + Make sure to add the callback url to your app's OAuth allow list. + + + + Redirect users to the Klavis AI OAuth authorization endpoint with your client ID: + + + + ```javascript without SDK + // Example: Initiating GitHub OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/github/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating GitHub OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Github, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating GitHub OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.GITHUB, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + + You can also specify scope and redirect_url in the authUrl, check the api reference for more details. + + + + For simplicity, you can use the default callback URL: https://api.klavis.ai/oauth/{server_name}/callback, +as shown in the previous step. This is the endpoint where we handle user credentials for authentication. + +However, some OAuth consent screens (such as GitHub's) display the callback URL to the user. If this URL doesn't match your application's domain, it can appear untrustworthy. + +To address this, you can use a callback URL under your own domain and set up a redirect—either via DNS or within your application—that forwards requests to: +https://api.klavis.ai/oauth/{server_name}/callback. Below is an example using FastAPI to simply redirect from your Github oauth application to Klavis oauth service in python - + +```python +@app.get("/github/redirect") +async def redirect_to_jira_callback(request: Request): + target_url = "https://api.klavis.ai/oauth/github/callback" + + query_params = request.query_params + if query_params: + query_string = str(query_params) + target_url = f"{target_url}?{query_string}" + + return RedirectResponse(url=target_url) +``` + + + + + +For technical assistance with OAuth implementation or white-label, please join our Discord community. diff --git a/docs/concepts/mcp.mdx b/docs/concepts/mcp.mdx new file mode 100644 index 00000000..a088b6c8 --- /dev/null +++ b/docs/concepts/mcp.mdx @@ -0,0 +1,100 @@ +--- +title: "MCP" +description: "Understanding the Model Context Protocol and how it enables AI agents to interact with external systems" +--- + +# What is Model Context Protocol (MCP)? + +The **Model Context Protocol (MCP)** is an open standard that unifies how AI models communicate with external tools and services. It acts as a bridge between Large Language Models (LLMs) and the real world, enabling AI agents to interact with databases, APIs, files, and other systems in a standardized way. + + + MCP Architecture Overview + + +## How MCP Works + +MCP follows a simple **client-server architecture**: + +- **MCP Client**: The AI assistant or application (like Claude Desktop, Cursor, or your custom AI agent) +- **MCP Server**: A specialized backend that exposes tools to interact with external systems +- **Tools**: Individual functions that the AI can call to perform specific actions + +```mermaid +graph LR + A[AI Agent] -->|Request| B[MCP Client] + B -->|Tool Call| C[MCP Server] + C -->|API Call| D[External Service] + D -->|Response| C + C -->|Result| B + B -->|Context| A +``` + +## Key Benefits + + + + One protocol for all external integrations - no more custom, one-off connections + + + Access to 100+ pre-built tools across CRM, productivity, and development platforms + + + Built-in OAuth flows and secure credential management + + + Production-ready hosted infrastructure with self-hosting options + + + +## MCP Server Types + +### Hosted MCP Servers (Recommended) +Klavis AI provides **production-ready hosted MCP servers** that eliminate setup complexity: + +- No infrastructure management +- Built-in OAuth authentication +- Automatic updates and maintenance +- 99.9% uptime SLA + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-api-key") +server = klavis.mcp_server.create_strata_server(...) +``` + +### Self-Hosted MCP Servers +For custom requirements or on-premises deployments: + +```bash +# Run any MCP server with Docker +docker run -p 5000:5000 ghcr.io/klavis-ai/gmail-mcp-server:latest +``` + +## Available MCP Integrations + +Klavis AI offers 100+ MCP servers across different categories: + + Browse our complete catalog of MCP server integrations + + +## Getting Started + + + Get up and running in under 5 minutes + + + +## Learn More + + + + Read the official Model Context Protocol specification + + + Explore our open-source repo + + + Join our developer community + + diff --git a/docs/concepts/strata.mdx b/docs/concepts/strata.mdx new file mode 100644 index 00000000..39fd2b7c --- /dev/null +++ b/docs/concepts/strata.mdx @@ -0,0 +1,141 @@ +--- +title: "Strata" +description: "One MCP server for AI agents to use tools progressively at any scale" +--- + +Strata Hero - Progressive tool discovery for AI agents + +# What is Strata? + +**Strata** is one MCP server that guides AI agents use tools reliably at any complexity, instead of overwhelming them with everything at once, it was designed by thinking human interacting with tools, solving the three major problems that plague AI agents today: + +- **Tool Overload**: Too many tools cause LLM choice paralysis +- **Context Overload**: Long tool lists blow up token counts and costs +- **Coverage Gap**: Most servers are stuck at 40~50 tools, limiting what you can build + +You can use Strata via our [website](https://www.klavis.ai/home/mcp-servers), [API](https://www.klavis.ai/docs/api-reference/strata/create), or even [open source](https://github.com/Klavis-AI/klavis) on your own data! + + +## Video Tutorial +Watch this video tutorial for a complete understanding of how Strata works: + + + +## Text Tutorial + + +Check out this [shared Claude conversation](https://claude.ai/share/9b44a192-9f2d-46e2-a875-ef905c457070) to see Strata in action! + + +### 1. Discover Server Categories or Actions +**`discover_server_categories_or_actions`** - find relevant categories or actions based on user intent. No semantic search! + + + **Description**: **PREFERRED STARTING POINT**. Discover available categories or actions based on user query. Try this tool first when exploring what actions are available across servers. This is the primary entry point for exploring available actions and should be used before other search methods. The output will be a list of servers with detail level and details. + + If detail level is 'categories_only', the details will be a list of category names only. Next step prefer to use get_category_actions tool to get the actions for the categories. + + If detail level is 'full_details', the details will be a list of category names with their actions details included. This happens when the server has only a few actions. Next step prefer to use execute_action tool to execute the actions. + + If detail level is 'categories_and_actions', the details will be a list of category names and action names. This happens when using external tools. Next step prefer to use get_action_details tool to get the details of the actions. + + **Parameters**: + - `user_query` (string, required): Natural language user query to filter results. + - `server_names` (array, required): List of server names to discover categories or actions. + + +### 2. Get Category Actions +**`get_category_actions`** - retrieve all action names within specified categories. + + + **Description**: Get a comprehensive overview of API actions available within specific categories. Use this tool if you want to explore what actions are available in particular service categories or get a detailed view of category capabilities. ** Important **: It should only be called after you get the server categories from the discover_server_categories tool. + + **Parameters**: + - `category_names` (array, required): List of categories to get actions for + + +### 3. Get Action Details +**`get_action_details`** - get full schema and parameters for a specific action. + + + **Description**: Get detailed information about a specific action, including required and optional parameters. Must provide category name and action name. ** Important **: It should only be called after you get the server categories from previous tool calls. + + **Parameters**: + - `category_name` (string, required): The name of the category + - `action_name` (string, required): The name of the action/operation within the category + + +### 4. Execute Action +**`execute_action`** - run actions with parameters and get results. + + + **Description**: Execute a specific action with the provided parameters. Must provide server name, action name, and action parameters. ** Important **: It should only be called after you get the action details from the get_action_details tool. + + **Parameters**: + - `server_name` (string, required): The name of the server + - `category_name` (string, required): The name of the category to execute the action for + - `action_name` (string, required): The name of the action/operation to execute + - `path_params` (string, optional): JSON string containing path parameters for the action + - `query_params` (string, optional): JSON string containing query parameters for the action + - `body_schema` (string, optional, default: "{}"): JSON string containing request body for actions + - `include_output_fields` (array, optional): Optional but strongly recommended when you know the response_schema of this action from previous tool calls: Array of field paths to include in the response. Only these fields will be returned. Use dot notation for nested fields (e.g., "author.displayName"). + - `maximum_output_characters` (integer, optional): Optional: Maximum number of characters to return in the response. If the response exceeds this limit, it will be truncated. Prefer include_output_fields over this. + + +### 5. Search Documentation +**`search_documentation`** - find relevant information only when needed. + + + **Description**: **SECONDARY OPTION**: Use this tool only when discover_server_categories doesn't provide sufficient detail or when you need to search within a specific server's documentation. Search for server action documentations by category, operation, tags, or functionality using keyword matching. This is not a natural language search - it matches exact keywords and phrases. Returns endpoints ranked by relevance. Use a few targeted keywords to find the best matches. Common patterns: category names ('projects', 'users', 'pipelines'), actions ('create', 'delete', 'list', 'get'), or combinations ('create user', 'list projects'). The search algorithm uses smart scoring to prevent verbose description fields from overwhelming results. + + **Parameters**: + - `query` (string, required): Search keywords that match API documentation terms. Best practices: (1) Use resource names like 'users', 'projects', 'files', (2) Add actions for precision like 'user create' or 'project delete', (3) Avoid filler words like 'how to', 'show me', 'all the' - focus on the core terms that appear in endpoint names and descriptions. + - `server_name` (string, required): Name of the server to search within. + - `max_results` (integer, optional, default: 10, minimum: 1, maximum: 50): Number of results to return. Default: 10 + + +### 6. Handle Auth Failure +**`handle_auth_failure`** - handle authentication only when needed. + + + **Description**: Handle authentication failures that occur when executing actions. CRITICAL: This tool should ONLY be called when execute_action fails specifically due to authentication issues (401 Unauthorized, invalid credentials, expired tokens, etc.). DO NOT call this tool to check authentication status or for any other purpose. Usage: (1) When execute_action returns an authentication error, call this tool with 'get_auth_url' to get authentication instructions. (2) When user provides authentication data after a failure, call this tool with 'save_auth_data' to save the credentials. NEVER call this tool if the failure is NOT an authentication failure (e.g., 404 Not Found, 500 Internal Server Error, etc.). + + **Parameters**: + - `server_name` (string, required): The name of the server that failed authentication during execute_action + - `intention` (string, required, enum: ["get_auth_url", "save_auth_data"]): Use 'get_auth_url' when execute_action fails with authentication errors to get authentication instructions. Use 'save_auth_data' when user provides authentication credentials after an authentication failure. + - `auth_data` (object, optional): Authentication data provided by user after an authentication failure (e.g., `{"token": "...", "api_key": "..."}`). Only used with 'save_auth_data' intention when resolving authentication failures. + + + +## Evaluation + +Strata delivers real results: + +- **MCPMark Benchmark**: Achieves **+15.2% higher pass@1 rate** vs the official GitHub server and **+13.4% higher pass@1 rate** vs the official Notion server. ([Source](https://mcpmark.ai/leaderboard/mcp)) +- **Human Evaluation**: Hits **83%+ accuracy** across >2k real world queries evaluation sets + +## Next Steps + + + + Create your first Strata server in minutes + + + Explore the complete Strata API + + diff --git a/docs/docs.json b/docs/docs.json new file mode 100644 index 00000000..e9346673 --- /dev/null +++ b/docs/docs.json @@ -0,0 +1,413 @@ +{ + "$schema": "https://mintlify.com/docs.json", + "theme": "mint", + "name": "Klavis AI", + "description": "Klavis AI is building Open Source MCP Integrations for AI applications.", + "colors": { + "primary": "#000000", + "light": "#ffffff", + "dark": "#000000" + }, + "logo": { + "light": "/images/logo/light.png", + "dark": "/images/logo/light.png", + "href": "https://www.klavis.ai/" + }, + "favicon": "images/logo/favicon.png", + "appearance": { + "default": "light", + "strict": true + }, + "navigation": { + "global": { + "anchors": [ + { + "anchor": "Website", + "href": "https://www.klavis.ai/", + "icon": "globe" + }, + { + "anchor": "Blog", + "href": "https://www.klavis.ai/blog", + "icon": "newspaper" + } + ] + }, + "tabs": [ + { + "tab": "Documentation", + "icon": "book-open", + "groups": [ + { + "group": "Get Started", + "pages": [ + "introduction", + "quickstart", + "installation" + ] + }, + { + "group": "Core Concepts", + "pages": [ + "concepts/mcp", + "concepts/strata" + ] + }, + { + "group": "Auth with Klavis", + "pages": [ + "auth/api-key", + "auth/oauth", + "auth/white-label" + ] + }, + { + "group": "Enterprise Security", + "pages": [ + "enterprise-security/klavis-security" + ] + }, + { + "group": "AI Platform Integrations", + "pages": [ + "ai-platform-integration/overview", + "ai-platform-integration/claude", + "ai-platform-integration/crewai", + "ai-platform-integration/fireworks-ai", + "ai-platform-integration/gemini", + "ai-platform-integration/google-adk", + "ai-platform-integration/langchain", + "ai-platform-integration/llamaindex", + "ai-platform-integration/mastra", + "ai-platform-integration/openai", + "ai-platform-integration/together-ai" + ] + }, + { + "group": "MCP Integrations", + "pages": [ + "mcp-server/overview", + "mcp-server/affinity", + "mcp-server/airtable", + "mcp-server/asana", + "mcp-server/attio", + "mcp-server/box", + "mcp-server/brave_search", + "mcp-server/cal_com", + "mcp-server/calendly", + "mcp-server/clickup", + "mcp-server/close", + "mcp-server/cloudflare", + "mcp-server/coinbase", + "mcp-server/confluence", + "mcp-server/dialpad", + "mcp-server/discord", + "mcp-server/doc2markdown", + "mcp-server/docusign", + "mcp-server/dropbox", + "mcp-server/elevenlabs", + "mcp-server/exa", + "mcp-server/fathom", + "mcp-server/figma", + "mcp-server/firecrawl-deep-research", + "mcp-server/firecrawl-web-search", + "mcp-server/fireflies", + "mcp-server/freshdesk", + "mcp-server/github", + "mcp-server/gitlab", + "mcp-server/gmail", + "mcp-server/gong", + "mcp-server/google_calendar", + "mcp-server/google_docs", + "mcp-server/google_drive", + "mcp-server/google_jobs", + "mcp-server/google_sheets", + "mcp-server/hacker_news", + "mcp-server/heygen", + "mcp-server/hubspot", + "mcp-server/jira", + "mcp-server/klavis-reportgen", + "mcp-server/klaviyo", + "mcp-server/linear", + "mcp-server/linkedin", + "mcp-server/markdown2doc", + "mcp-server/mem0", + "mcp-server/metabase", + "mcp-server/microsoft_teams", + "mcp-server/mixpanel", + "mcp-server/monday", + "mcp-server/moneybird", + "mcp-server/motion", + "mcp-server/notion", + "mcp-server/onedrive", + "mcp-server/openrouter", + "mcp-server/outlook", + "mcp-server/pagerduty", + "mcp-server/perplexity", + "mcp-server/pipedrive", + "mcp-server/plai", + "mcp-server/postgres", + "mcp-server/posthog", + "mcp-server/quickbooks", + "mcp-server/resend", + "mcp-server/salesforce", + "mcp-server/sendgrid", + "mcp-server/servicenow", + "mcp-server/shopify", + "mcp-server/slack", + "mcp-server/stripe", + "mcp-server/supabase", + "mcp-server/tavily", + "mcp-server/vercel", + "mcp-server/whatsapp", + "mcp-server/wordpress", + "mcp-server/youtube", + "mcp-server/zendesk" + ] + }, + { + "group": "Legacy", + "pages": [ + "legacy/instance" + ] + } + ] + }, + { + "tab": "API Reference", + "icon": "magnifying-glass", + "groups": [ + { + "group": "API References", + "pages": [ + "api-reference/introduction", + "api-reference/api_key" + ] + }, + { + "group": "Strata", + "pages": [ + "api-reference/strata/create", + "api-reference/strata/get", + "api-reference/strata/add", + "api-reference/strata/delete", + "api-reference/strata/list-raw-actions", + { + "group": "Strata Auth / OAuth", + "pages": [ + "api-reference/strata/set-strata-auth", + "api-reference/strata/get-strata-auth", + "api-reference/strata/delete-strata-auth" + ] + } + ] + }, + { + "group": "MCP Server Instance", + "pages": [ + "api-reference/mcp-server/create-a-server-instance", + "api-reference/mcp-server/get-server-instance", + "api-reference/mcp-server/delete-a-server-instance", + { + "group": "Instance Auth / OAuth", + "pages": [ + "api-reference/mcp-server/set-instance_auth", + "api-reference/mcp-server/get-instance_auth", + "api-reference/mcp-server/delete-instance_auth" + ] + } + ] + }, + { + "group": "User", + "pages": [ + "api-reference/user/get-user", + "api-reference/user/get-user-integrations", + "api-reference/user/get-all-users", + "api-reference/user/delete-user", + { + "group": "User Auth / OAuth", + "pages": [ + "api-reference/user/set-user-auth", + "api-reference/user/get-user-auth", + "api-reference/user/delete-user-auth" + ] + } + ] + }, + { + "group": "MCP Server Metadata", + "pages": [ + "api-reference/mcp-server/get-all-servers", + "api-reference/mcp-server/get-tools" + ] + }, + { + "group": "Function Calling with MCP", + "pages": [ + "api-reference/mcp-server/list-tools", + "api-reference/mcp-server/call-tool" + ] + }, + { + "group": "White Label OAuth", + "pages": [ + "api-reference/white-labeling/create", + "api-reference/white-labeling/get" + ] + }, + { + "group": "Self Hosted MCP Server", + "pages": [ + "api-reference/mcp-server/create-a-self-hosted-mcp-server-instance" + ] + }, + { + "group": "Individual OAuth", + "pages": [ + { + "group": "OAuth Servers", + "pages": [ + "api-reference/oauth/airtable-oauth/authorize-airtable", + "api-reference/oauth/asana-oauth/authorize-asana", + "api-reference/oauth/attio-oauth/authorize-attio", + "api-reference/oauth/box-oauth/authorize-box", + "api-reference/oauth/calcom-oauth/authorize-calcom", + "api-reference/oauth/canva-oauth/authorize-canva", + "api-reference/oauth/clickup-oauth/authorize-clickup", + "api-reference/oauth/close-oauth/authorize-close", + "api-reference/oauth/confluence-oauth/authorize-confluence", + "api-reference/oauth/dialpad-oauth/authorize-dialpad", + "api-reference/oauth/docusign-oauth/authorize-docusign", + "api-reference/oauth/dropbox-oauth/authorize-dropbox", + "api-reference/oauth/figma-oauth/authorize-figma", + "api-reference/oauth/gcalendar-oauth/authorize-gcalendar", + "api-reference/oauth/gdocs-oauth/authorize-gdocs", + "api-reference/oauth/gdrive-oauth/authorize-gdrive", + "api-reference/oauth/github-oauth/authorize-github", + "api-reference/oauth/gitlab-oauth/authorize-gitlab", + "api-reference/oauth/gmail-oauth/authorize-gmail", + "api-reference/oauth/gsheets-oauth/authorize-gsheets", + "api-reference/oauth/hubspot-oauth/authorize-hubspot", + "api-reference/oauth/jira-oauth/authorize-jira", + "api-reference/oauth/klaviyo-oauth/authorize-klaviyo", + "api-reference/oauth/linear-oauth/authorize-linear", + "api-reference/oauth/linkedin-oauth/authorize-linkedin", + "api-reference/oauth/notion-oauth/authorize-notion", + "api-reference/oauth/onedrive-oauth/authorize-onedrive", + "api-reference/oauth/outlook-oauth/authorize-outlook", + "api-reference/oauth/pagerduty-oauth/authorize-pagerduty", + "api-reference/oauth/pipedrive-oauth/authorize-pipedrive", + "api-reference/oauth/quickbooks-oauth/authorize-quickbooks", + "api-reference/oauth/salesforce-oauth/authorize-salesforce", + "api-reference/oauth/shopify-oauth/authorize-shopify", + "api-reference/oauth/slack-oauth/authorize-slack", + "api-reference/oauth/stripe-connect-oauth/authorize-stripe-connect", + "api-reference/oauth/supabase-oauth/authorize-supabase", + "api-reference/oauth/vercel-oauth/authorize-vercel", + "api-reference/oauth/wordpress-oauth/authorize-wordpress", + "api-reference/oauth/xero-oauth/authorize-xero", + "api-reference/oauth/zendesk-oauth/authorize-zendesk" + ] + } + ] + } + ] + }, + { + "tab": "Knowledge Base", + "icon": "book", + "groups": [ + { + "group": "Getting Started", + "pages": [ + "knowledge-base/introduction" + ] + }, + { + "group": "Onboarding", + "pages": [ + "knowledge-base/onboarding/create-your-first-mcp-server", + "knowledge-base/onboarding/use-your-first-mcp-server", + "knowledge-base/llm-based-development" + ] + }, + { + "group": "Use MCP Server", + "pages": [ + "knowledge-base/use-mcp-server/overview", + "knowledge-base/use-mcp-server/chatgpt", + "knowledge-base/use-mcp-server/claude_code", + "knowledge-base/use-mcp-server/claude_web_desktop", + "knowledge-base/use-mcp-server/cline", + "knowledge-base/use-mcp-server/continue", + "knowledge-base/use-mcp-server/cursor", + "knowledge-base/use-mcp-server/gemini_cli", + "knowledge-base/use-mcp-server/kiro", + "knowledge-base/use-mcp-server/n8n", + "knowledge-base/use-mcp-server/vs_code", + "knowledge-base/use-mcp-server/windsurf" + ] + }, + { + "group": "OAuth Apps", + "pages": [ + "knowledge-base/oauth_app/oauth-scopes", + "knowledge-base/oauth_app/airtable", + "knowledge-base/oauth_app/asana", + "knowledge-base/oauth_app/calendly", + "knowledge-base/oauth_app/canva", + "knowledge-base/oauth_app/discord", + "knowledge-base/oauth_app/dropbox", + "knowledge-base/oauth_app/figma", + "knowledge-base/oauth_app/gmail", + "knowledge-base/oauth_app/google_calendar", + "knowledge-base/oauth_app/google_docs", + "knowledge-base/oauth_app/google_drive", + "knowledge-base/oauth_app/google_sheets", + "knowledge-base/oauth_app/hubspot", + "knowledge-base/oauth_app/linkedin", + "knowledge-base/oauth_app/monday", + "knowledge-base/oauth_app/moneybird", + "knowledge-base/oauth_app/onedrive", + "knowledge-base/oauth_app/quickbooks", + "knowledge-base/oauth_app/salesforce", + "knowledge-base/oauth_app/slack", + "knowledge-base/oauth_app/xero" + ] + } + ] + } + ] + }, + "navbar": { + "links": [ + { + "label": "Dashboard", + "href": "https://www.klavis.ai/home" + } + ], + "primary": { + "type": "github", + "href": "https://github.com/Klavis-AI/klavis" + } + }, + "footer": { + "socials": { + "github": "https://github.com/Klavis-AI/klavis", + "linkedin": "https://www.linkedin.com/company/klavis-ai", + "discord": "https://discord.gg/p7TuTEcssn" + } + }, + "seo": { + "metatags": { + "canonical": "https://www.klavis.ai/docs" + } + }, + "redirects": [ + { + "source": "/documentation/:slug*", + "destination": "/:slug*" + } + ] +} \ No newline at end of file diff --git a/docs/enterprise-security/klavis-security.mdx b/docs/enterprise-security/klavis-security.mdx new file mode 100644 index 00000000..07c48e93 --- /dev/null +++ b/docs/enterprise-security/klavis-security.mdx @@ -0,0 +1,39 @@ +--- +title: "Klavis Security" +description: "Comprehensive security layer for MCP integrations protecting against prompt injection, tool poisoning, and other emerging threats." +--- + +![Klavis Security](/images/klavis-security/klavis_security.png) + +## Overview + +Klavis Guardrails is a comprehensive security layer designed to protect MCP (Model Context Protocol) integrations from emerging threats. It operates as an intelligent proxy between MCP clients and servers, providing real-time threat detection and policy enforcement. + +## The Security Challenge + +MCP's architecture amplifies security risks by exposing tools, resources, and prompts directly to AI agents. Recent vulnerabilities demonstrate critical flaws: + +- **Prompt Injection via Tool Descriptions**: Malicious instructions embedded in MCP tool metadata +- **Cross-Repository Information Leakage**: Agents coerced into accessing private repositories +- **Command Injection and RCE**: Basic security flaws allowing arbitrary code execution +- **Credential Theft**: MCP servers storing OAuth tokens become high-value targets + +![MCP Vulnerabilities](/images/klavis-security/MCP_vulnerabilities.png) + +## Security Architecture + +![Klavis Security Architecture](/images/klavis-security/klavis_security_architecture.png) + +Klavis Guardrails operates as a security proxy that intercepts, analyzes, and enforces policies on all MCP communication in real-time with four key protection mechanisms: + +**Tool Poisoning Detection**: Monitors MCP tool metadata using behavioral analysis to identify when tools deviate from declared functionality. + +**Prompt Injection Prevention**: Uses advanced NLP to analyze prompts for malicious instructions, detecting sophisticated attacks before they reach the model. + +**Privilege Escalation Monitoring**: Enforces granular access controls ensuring MCP servers operate under least privilege principles. + +**Command Injection Mitigation**: Performs deep inspection of tool invocations with strict allowlists and input sanitization. + +## Get Started + +**Ready to secure your MCP infrastructure?** Join our beta by [scheduling a 15-minute call](https://cal.com/zihao-lin-u35ykt/15min) with us, or reach out directly at security@klavis.ai. \ No newline at end of file diff --git a/docs/images/affinity.svg b/docs/images/affinity.svg new file mode 100644 index 00000000..6da09a7d --- /dev/null +++ b/docs/images/affinity.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/docs/images/ai-platform/anthropic.svg b/docs/images/ai-platform/anthropic.svg new file mode 100644 index 00000000..135a8b9f --- /dev/null +++ b/docs/images/ai-platform/anthropic.svg @@ -0,0 +1,16 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/ai-platform/crewai-klavis.png b/docs/images/ai-platform/crewai-klavis.png new file mode 100644 index 00000000..939480bf Binary files /dev/null and b/docs/images/ai-platform/crewai-klavis.png differ diff --git a/docs/images/ai-platform/crewai.svg b/docs/images/ai-platform/crewai.svg new file mode 100644 index 00000000..95cb17f9 --- /dev/null +++ b/docs/images/ai-platform/crewai.svg @@ -0,0 +1 @@ +CrewAI \ No newline at end of file diff --git a/docs/images/ai-platform/fireworks-ai.svg b/docs/images/ai-platform/fireworks-ai.svg new file mode 100644 index 00000000..a23445cf --- /dev/null +++ b/docs/images/ai-platform/fireworks-ai.svg @@ -0,0 +1 @@ +Fireworks \ No newline at end of file diff --git a/docs/images/ai-platform/gemini.svg b/docs/images/ai-platform/gemini.svg new file mode 100644 index 00000000..878eb627 --- /dev/null +++ b/docs/images/ai-platform/gemini.svg @@ -0,0 +1 @@ +Gemini \ No newline at end of file diff --git a/docs/images/ai-platform/google-adk.png b/docs/images/ai-platform/google-adk.png new file mode 100644 index 00000000..9f967caa Binary files /dev/null and b/docs/images/ai-platform/google-adk.png differ diff --git a/docs/images/ai-platform/llamaindex-klavis.png b/docs/images/ai-platform/llamaindex-klavis.png new file mode 100644 index 00000000..4521ca72 Binary files /dev/null and b/docs/images/ai-platform/llamaindex-klavis.png differ diff --git a/docs/images/ai-platform/llamaindex.svg b/docs/images/ai-platform/llamaindex.svg new file mode 100644 index 00000000..6f6ebde7 --- /dev/null +++ b/docs/images/ai-platform/llamaindex.svg @@ -0,0 +1 @@ +LlamaIndex \ No newline at end of file diff --git a/docs/images/ai-platform/mastra-klavis.png b/docs/images/ai-platform/mastra-klavis.png new file mode 100644 index 00000000..4fab3407 Binary files /dev/null and b/docs/images/ai-platform/mastra-klavis.png differ diff --git a/docs/images/ai-platform/mastra.svg b/docs/images/ai-platform/mastra.svg new file mode 100644 index 00000000..278e61b8 --- /dev/null +++ b/docs/images/ai-platform/mastra.svg @@ -0,0 +1,302 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/ai-platform/openai.svg b/docs/images/ai-platform/openai.svg new file mode 100644 index 00000000..3b4eff96 --- /dev/null +++ b/docs/images/ai-platform/openai.svg @@ -0,0 +1,2 @@ + +OpenAI icon \ No newline at end of file diff --git a/docs/images/ai-platform/together-ai.svg b/docs/images/ai-platform/together-ai.svg new file mode 100644 index 00000000..a853e78a --- /dev/null +++ b/docs/images/ai-platform/together-ai.svg @@ -0,0 +1 @@ +together.ai \ No newline at end of file diff --git a/docs/images/airtable.svg b/docs/images/airtable.svg new file mode 100644 index 00000000..fd4f9454 --- /dev/null +++ b/docs/images/airtable.svg @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/architecture.png b/docs/images/architecture.png new file mode 100644 index 00000000..87f3348d Binary files /dev/null and b/docs/images/architecture.png differ diff --git a/docs/images/asana.svg b/docs/images/asana.svg new file mode 100644 index 00000000..8a89c47f --- /dev/null +++ b/docs/images/asana.svg @@ -0,0 +1,6 @@ + + + + diff --git a/docs/images/attio.svg b/docs/images/attio.svg new file mode 100644 index 00000000..da52accd --- /dev/null +++ b/docs/images/attio.svg @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/box.svg b/docs/images/box.svg new file mode 100644 index 00000000..1c88da6b --- /dev/null +++ b/docs/images/box.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/docs/images/brave_search.svg b/docs/images/brave_search.svg new file mode 100644 index 00000000..062dc894 --- /dev/null +++ b/docs/images/brave_search.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/cal_com.svg b/docs/images/cal_com.svg new file mode 100644 index 00000000..7d6972a2 --- /dev/null +++ b/docs/images/cal_com.svg @@ -0,0 +1,252 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/calendly.svg b/docs/images/calendly.svg new file mode 100644 index 00000000..4ddcfb68 --- /dev/null +++ b/docs/images/calendly.svg @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/claude.svg b/docs/images/claude.svg new file mode 100644 index 00000000..62dc0db1 --- /dev/null +++ b/docs/images/claude.svg @@ -0,0 +1 @@ +Claude \ No newline at end of file diff --git a/docs/images/clickup.svg b/docs/images/clickup.svg new file mode 100644 index 00000000..0ae45e9b --- /dev/null +++ b/docs/images/clickup.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/cline.svg b/docs/images/cline.svg new file mode 100644 index 00000000..28939293 --- /dev/null +++ b/docs/images/cline.svg @@ -0,0 +1,87 @@ + + + + + + \ No newline at end of file diff --git a/docs/images/close.svg b/docs/images/close.svg new file mode 100644 index 00000000..46e7407d --- /dev/null +++ b/docs/images/close.svg @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + diff --git a/docs/images/cloudflare.svg b/docs/images/cloudflare.svg new file mode 100644 index 00000000..ebfa9f0f --- /dev/null +++ b/docs/images/cloudflare.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/concepts/mcp-simple-diagram.avif b/docs/images/concepts/mcp-simple-diagram.avif new file mode 100644 index 00000000..f78d36cc Binary files /dev/null and b/docs/images/concepts/mcp-simple-diagram.avif differ diff --git a/docs/images/concepts/strata_hero.png b/docs/images/concepts/strata_hero.png new file mode 100644 index 00000000..020f8bfb Binary files /dev/null and b/docs/images/concepts/strata_hero.png differ diff --git a/docs/images/confluence.svg b/docs/images/confluence.svg new file mode 100644 index 00000000..ee2ffdf9 --- /dev/null +++ b/docs/images/confluence.svg @@ -0,0 +1 @@ + diff --git a/docs/images/continue.svg b/docs/images/continue.svg new file mode 100644 index 00000000..2ab70a41 --- /dev/null +++ b/docs/images/continue.svg @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/create_white_label_ui.png b/docs/images/create_white_label_ui.png new file mode 100644 index 00000000..eee33b8f Binary files /dev/null and b/docs/images/create_white_label_ui.png differ diff --git a/docs/images/cursor.svg b/docs/images/cursor.svg new file mode 100644 index 00000000..abadee50 --- /dev/null +++ b/docs/images/cursor.svg @@ -0,0 +1 @@ +Cursor \ No newline at end of file diff --git a/docs/images/develop-with-ai/ask_ai.png b/docs/images/develop-with-ai/ask_ai.png new file mode 100644 index 00000000..a3e63188 Binary files /dev/null and b/docs/images/develop-with-ai/ask_ai.png differ diff --git a/docs/images/dialpad.svg b/docs/images/dialpad.svg new file mode 100644 index 00000000..c656f1d6 --- /dev/null +++ b/docs/images/dialpad.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/docs/images/discord.svg b/docs/images/discord.svg new file mode 100644 index 00000000..5806fa7f --- /dev/null +++ b/docs/images/discord.svg @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/docusign.svg b/docs/images/docusign.svg new file mode 100644 index 00000000..14c17bfd --- /dev/null +++ b/docs/images/docusign.svg @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/images/dropbox.svg b/docs/images/dropbox.svg new file mode 100644 index 00000000..e467522a --- /dev/null +++ b/docs/images/dropbox.svg @@ -0,0 +1 @@ + diff --git a/docs/images/elevenlabs.svg b/docs/images/elevenlabs.svg new file mode 100644 index 00000000..4c1210cf --- /dev/null +++ b/docs/images/elevenlabs.svg @@ -0,0 +1,4 @@ + + + + diff --git a/docs/images/exa.png b/docs/images/exa.png new file mode 100644 index 00000000..09a05730 Binary files /dev/null and b/docs/images/exa.png differ diff --git a/docs/images/fathom.webp b/docs/images/fathom.webp new file mode 100644 index 00000000..b1456215 Binary files /dev/null and b/docs/images/fathom.webp differ diff --git a/docs/images/figma.svg b/docs/images/figma.svg new file mode 100644 index 00000000..14beab17 --- /dev/null +++ b/docs/images/figma.svg @@ -0,0 +1 @@ +Figma.logoCreated using Figma \ No newline at end of file diff --git a/docs/images/firecrawl.svg b/docs/images/firecrawl.svg new file mode 100644 index 00000000..e5008ad3 --- /dev/null +++ b/docs/images/firecrawl.svg @@ -0,0 +1,4 @@ + + + šŸ”„ + \ No newline at end of file diff --git a/docs/images/fireflies.svg b/docs/images/fireflies.svg new file mode 100644 index 00000000..c4f7120d --- /dev/null +++ b/docs/images/fireflies.svg @@ -0,0 +1,86 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/images/freshdesk.svg b/docs/images/freshdesk.svg new file mode 100644 index 00000000..ecb5c6e0 --- /dev/null +++ b/docs/images/freshdesk.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/gemini.svg b/docs/images/gemini.svg new file mode 100644 index 00000000..f1cf3575 --- /dev/null +++ b/docs/images/gemini.svg @@ -0,0 +1 @@ +Gemini \ No newline at end of file diff --git a/docs/images/get-started/introduction/bridge.png b/docs/images/get-started/introduction/bridge.png new file mode 100644 index 00000000..2e3dab71 Binary files /dev/null and b/docs/images/get-started/introduction/bridge.png differ diff --git a/docs/images/get-started/quickstart/individual_url.png b/docs/images/get-started/quickstart/individual_url.png new file mode 100644 index 00000000..10398770 Binary files /dev/null and b/docs/images/get-started/quickstart/individual_url.png differ diff --git a/docs/images/get-started/quickstart/strata_ui.png b/docs/images/get-started/quickstart/strata_ui.png new file mode 100644 index 00000000..6a281223 Binary files /dev/null and b/docs/images/get-started/quickstart/strata_ui.png differ diff --git a/docs/images/github.svg b/docs/images/github.svg new file mode 100644 index 00000000..0da0d911 --- /dev/null +++ b/docs/images/github.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/github_copilot.svg b/docs/images/github_copilot.svg new file mode 100644 index 00000000..3cbf22a5 --- /dev/null +++ b/docs/images/github_copilot.svg @@ -0,0 +1 @@ +GithubCopilot \ No newline at end of file diff --git a/docs/images/gitlab.svg b/docs/images/gitlab.svg new file mode 100644 index 00000000..dd0753e4 --- /dev/null +++ b/docs/images/gitlab.svg @@ -0,0 +1,22 @@ + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/gmail.svg b/docs/images/gmail.svg new file mode 100644 index 00000000..e540ae9e --- /dev/null +++ b/docs/images/gmail.svg @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/gong.svg b/docs/images/gong.svg new file mode 100644 index 00000000..8848add5 --- /dev/null +++ b/docs/images/gong.svg @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/google_calendar.svg b/docs/images/google_calendar.svg new file mode 100644 index 00000000..c32c0c77 --- /dev/null +++ b/docs/images/google_calendar.svg @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + diff --git a/docs/images/google_docs.svg b/docs/images/google_docs.svg new file mode 100644 index 00000000..0ce8e2ad --- /dev/null +++ b/docs/images/google_docs.svg @@ -0,0 +1,15 @@ + + + + + + + + + + diff --git a/docs/images/google_drive.svg b/docs/images/google_drive.svg new file mode 100644 index 00000000..bf8d5d39 --- /dev/null +++ b/docs/images/google_drive.svg @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/docs/images/google_jobs.webp b/docs/images/google_jobs.webp new file mode 100644 index 00000000..86a14164 Binary files /dev/null and b/docs/images/google_jobs.webp differ diff --git a/docs/images/google_sheets.svg b/docs/images/google_sheets.svg new file mode 100644 index 00000000..72771d5e --- /dev/null +++ b/docs/images/google_sheets.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/hacker_news.svg b/docs/images/hacker_news.svg new file mode 100644 index 00000000..0fa3f8af --- /dev/null +++ b/docs/images/hacker_news.svg @@ -0,0 +1,421 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/heygen.webp b/docs/images/heygen.webp new file mode 100644 index 00000000..38f1a3a0 Binary files /dev/null and b/docs/images/heygen.webp differ diff --git a/docs/images/hubspot.svg b/docs/images/hubspot.svg new file mode 100644 index 00000000..be31d302 --- /dev/null +++ b/docs/images/hubspot.svg @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/docs/images/jira.svg b/docs/images/jira.svg new file mode 100644 index 00000000..5d1b2fc6 --- /dev/null +++ b/docs/images/jira.svg @@ -0,0 +1 @@ + diff --git a/docs/images/k.svg b/docs/images/k.svg new file mode 100644 index 00000000..e6737479 --- /dev/null +++ b/docs/images/k.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/kiro.svg b/docs/images/kiro.svg new file mode 100644 index 00000000..f2ab8980 --- /dev/null +++ b/docs/images/kiro.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/klavis-security/MCP_vulnerabilities.png b/docs/images/klavis-security/MCP_vulnerabilities.png new file mode 100644 index 00000000..9cc0ee4a Binary files /dev/null and b/docs/images/klavis-security/MCP_vulnerabilities.png differ diff --git a/docs/images/klavis-security/klavis_security.png b/docs/images/klavis-security/klavis_security.png new file mode 100644 index 00000000..2a0e4016 Binary files /dev/null and b/docs/images/klavis-security/klavis_security.png differ diff --git a/docs/images/klavis-security/klavis_security_architecture.png b/docs/images/klavis-security/klavis_security_architecture.png new file mode 100644 index 00000000..6cee45d5 Binary files /dev/null and b/docs/images/klavis-security/klavis_security_architecture.png differ diff --git a/docs/images/klavis.svg b/docs/images/klavis.svg new file mode 100644 index 00000000..12df1a5a --- /dev/null +++ b/docs/images/klavis.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/docs/images/klavis_reportgen.webp b/docs/images/klavis_reportgen.webp new file mode 100644 index 00000000..cd71831f Binary files /dev/null and b/docs/images/klavis_reportgen.webp differ diff --git a/docs/images/klaviyo.svg b/docs/images/klaviyo.svg new file mode 100644 index 00000000..63523377 --- /dev/null +++ b/docs/images/klaviyo.svg @@ -0,0 +1,33 @@ + + + + + + + + \ No newline at end of file diff --git a/docs/images/knowledge-base/airtable_oauth_app/airtable_step1_oauth.png b/docs/images/knowledge-base/airtable_oauth_app/airtable_step1_oauth.png new file mode 100644 index 00000000..1f76aebc Binary files /dev/null and b/docs/images/knowledge-base/airtable_oauth_app/airtable_step1_oauth.png differ diff --git a/docs/images/knowledge-base/airtable_oauth_app/airtable_step2_redirect.png b/docs/images/knowledge-base/airtable_oauth_app/airtable_step2_redirect.png new file mode 100644 index 00000000..7e0c309e Binary files /dev/null and b/docs/images/knowledge-base/airtable_oauth_app/airtable_step2_redirect.png differ diff --git a/docs/images/knowledge-base/airtable_oauth_app/airtable_step3_scopes.png b/docs/images/knowledge-base/airtable_oauth_app/airtable_step3_scopes.png new file mode 100644 index 00000000..ba60e474 Binary files /dev/null and b/docs/images/knowledge-base/airtable_oauth_app/airtable_step3_scopes.png differ diff --git a/docs/images/knowledge-base/asana_oauth_app/asana_step1_login.png b/docs/images/knowledge-base/asana_oauth_app/asana_step1_login.png new file mode 100644 index 00000000..a9fed794 Binary files /dev/null and b/docs/images/knowledge-base/asana_oauth_app/asana_step1_login.png differ diff --git a/docs/images/knowledge-base/asana_oauth_app/asana_step2_create_app.png b/docs/images/knowledge-base/asana_oauth_app/asana_step2_create_app.png new file mode 100644 index 00000000..13aa2f8d Binary files /dev/null and b/docs/images/knowledge-base/asana_oauth_app/asana_step2_create_app.png differ diff --git a/docs/images/knowledge-base/asana_oauth_app/asana_step3_oauth_and_redirect_url.png b/docs/images/knowledge-base/asana_oauth_app/asana_step3_oauth_and_redirect_url.png new file mode 100644 index 00000000..a3d6f086 Binary files /dev/null and b/docs/images/knowledge-base/asana_oauth_app/asana_step3_oauth_and_redirect_url.png differ diff --git a/docs/images/knowledge-base/asana_oauth_app/asana_step4_request_scopes.png b/docs/images/knowledge-base/asana_oauth_app/asana_step4_request_scopes.png new file mode 100644 index 00000000..9ade78d8 Binary files /dev/null and b/docs/images/knowledge-base/asana_oauth_app/asana_step4_request_scopes.png differ diff --git a/docs/images/knowledge-base/calendly_oauth_app/calendly_step1_login.png b/docs/images/knowledge-base/calendly_oauth_app/calendly_step1_login.png new file mode 100644 index 00000000..71992b4c Binary files /dev/null and b/docs/images/knowledge-base/calendly_oauth_app/calendly_step1_login.png differ diff --git a/docs/images/knowledge-base/calendly_oauth_app/calendly_step2_create_app.png b/docs/images/knowledge-base/calendly_oauth_app/calendly_step2_create_app.png new file mode 100644 index 00000000..bd48cc94 Binary files /dev/null and b/docs/images/knowledge-base/calendly_oauth_app/calendly_step2_create_app.png differ diff --git a/docs/images/knowledge-base/calendly_oauth_app/calendly_step3_client_id_and_secret.png b/docs/images/knowledge-base/calendly_oauth_app/calendly_step3_client_id_and_secret.png new file mode 100644 index 00000000..fdfd8784 Binary files /dev/null and b/docs/images/knowledge-base/calendly_oauth_app/calendly_step3_client_id_and_secret.png differ diff --git a/docs/images/knowledge-base/canva_oauth_app/step1_with_password.png b/docs/images/knowledge-base/canva_oauth_app/step1_with_password.png new file mode 100644 index 00000000..fa185a69 Binary files /dev/null and b/docs/images/knowledge-base/canva_oauth_app/step1_with_password.png differ diff --git a/docs/images/knowledge-base/canva_oauth_app/step1_without_password.png b/docs/images/knowledge-base/canva_oauth_app/step1_without_password.png new file mode 100644 index 00000000..d43fd722 Binary files /dev/null and b/docs/images/knowledge-base/canva_oauth_app/step1_without_password.png differ diff --git a/docs/images/knowledge-base/canva_oauth_app/step2_config.png b/docs/images/knowledge-base/canva_oauth_app/step2_config.png new file mode 100644 index 00000000..33e159c0 Binary files /dev/null and b/docs/images/knowledge-base/canva_oauth_app/step2_config.png differ diff --git a/docs/images/knowledge-base/canva_oauth_app/step3_scope.png b/docs/images/knowledge-base/canva_oauth_app/step3_scope.png new file mode 100644 index 00000000..ffd3ecef Binary files /dev/null and b/docs/images/knowledge-base/canva_oauth_app/step3_scope.png differ diff --git a/docs/images/knowledge-base/canva_oauth_app/step4_redirect_url.png b/docs/images/knowledge-base/canva_oauth_app/step4_redirect_url.png new file mode 100644 index 00000000..b6116a7b Binary files /dev/null and b/docs/images/knowledge-base/canva_oauth_app/step4_redirect_url.png differ diff --git a/docs/images/knowledge-base/canva_oauth_app/step5_submit_for_review.png b/docs/images/knowledge-base/canva_oauth_app/step5_submit_for_review.png new file mode 100644 index 00000000..d6e98465 Binary files /dev/null and b/docs/images/knowledge-base/canva_oauth_app/step5_submit_for_review.png differ diff --git a/docs/images/knowledge-base/canva_oauth_app/step6_in_review.png b/docs/images/knowledge-base/canva_oauth_app/step6_in_review.png new file mode 100644 index 00000000..6a935386 Binary files /dev/null and b/docs/images/knowledge-base/canva_oauth_app/step6_in_review.png differ diff --git a/docs/images/knowledge-base/canva_oauth_app/step7_review_email.png b/docs/images/knowledge-base/canva_oauth_app/step7_review_email.png new file mode 100644 index 00000000..ed6dbf13 Binary files /dev/null and b/docs/images/knowledge-base/canva_oauth_app/step7_review_email.png differ diff --git a/docs/images/knowledge-base/chatGPT/ChatGPT.png b/docs/images/knowledge-base/chatGPT/ChatGPT.png new file mode 100644 index 00000000..23d2fd90 Binary files /dev/null and b/docs/images/knowledge-base/chatGPT/ChatGPT.png differ diff --git a/docs/images/knowledge-base/chatGPT/step1.png b/docs/images/knowledge-base/chatGPT/step1.png new file mode 100644 index 00000000..dea8be98 Binary files /dev/null and b/docs/images/knowledge-base/chatGPT/step1.png differ diff --git a/docs/images/knowledge-base/chatGPT/step10.png b/docs/images/knowledge-base/chatGPT/step10.png new file mode 100644 index 00000000..45e456d0 Binary files /dev/null and b/docs/images/knowledge-base/chatGPT/step10.png differ diff --git a/docs/images/knowledge-base/chatGPT/step2.png b/docs/images/knowledge-base/chatGPT/step2.png new file mode 100644 index 00000000..8b3efabb Binary files /dev/null and b/docs/images/knowledge-base/chatGPT/step2.png differ diff --git a/docs/images/knowledge-base/chatGPT/step3.png b/docs/images/knowledge-base/chatGPT/step3.png new file mode 100644 index 00000000..c2dbce7a Binary files /dev/null and b/docs/images/knowledge-base/chatGPT/step3.png differ diff --git a/docs/images/knowledge-base/chatGPT/step4.png b/docs/images/knowledge-base/chatGPT/step4.png new file mode 100644 index 00000000..ed754669 Binary files /dev/null and b/docs/images/knowledge-base/chatGPT/step4.png differ diff --git a/docs/images/knowledge-base/chatGPT/step5.png b/docs/images/knowledge-base/chatGPT/step5.png new file mode 100644 index 00000000..dbe79208 Binary files /dev/null and b/docs/images/knowledge-base/chatGPT/step5.png differ diff --git a/docs/images/knowledge-base/chatGPT/step6.png b/docs/images/knowledge-base/chatGPT/step6.png new file mode 100644 index 00000000..70976745 Binary files /dev/null and b/docs/images/knowledge-base/chatGPT/step6.png differ diff --git a/docs/images/knowledge-base/chatGPT/step7.png b/docs/images/knowledge-base/chatGPT/step7.png new file mode 100644 index 00000000..243030af Binary files /dev/null and b/docs/images/knowledge-base/chatGPT/step7.png differ diff --git a/docs/images/knowledge-base/chatGPT/step8.png b/docs/images/knowledge-base/chatGPT/step8.png new file mode 100644 index 00000000..57206f90 Binary files /dev/null and b/docs/images/knowledge-base/chatGPT/step8.png differ diff --git a/docs/images/knowledge-base/chatGPT/step9.jpg b/docs/images/knowledge-base/chatGPT/step9.jpg new file mode 100644 index 00000000..9e971745 Binary files /dev/null and b/docs/images/knowledge-base/chatGPT/step9.jpg differ diff --git a/docs/images/knowledge-base/claude_code/Claude_Code.png b/docs/images/knowledge-base/claude_code/Claude_Code.png new file mode 100644 index 00000000..ac5cb093 Binary files /dev/null and b/docs/images/knowledge-base/claude_code/Claude_Code.png differ diff --git a/docs/images/knowledge-base/claude_code/step1.png b/docs/images/knowledge-base/claude_code/step1.png new file mode 100644 index 00000000..dea8be98 Binary files /dev/null and b/docs/images/knowledge-base/claude_code/step1.png differ diff --git a/docs/images/knowledge-base/claude_code/step2.png b/docs/images/knowledge-base/claude_code/step2.png new file mode 100644 index 00000000..8b3efabb Binary files /dev/null and b/docs/images/knowledge-base/claude_code/step2.png differ diff --git a/docs/images/knowledge-base/claude_code/step3.png b/docs/images/knowledge-base/claude_code/step3.png new file mode 100644 index 00000000..c2dbce7a Binary files /dev/null and b/docs/images/knowledge-base/claude_code/step3.png differ diff --git a/docs/images/knowledge-base/claude_code/step4.png b/docs/images/knowledge-base/claude_code/step4.png new file mode 100644 index 00000000..ed754669 Binary files /dev/null and b/docs/images/knowledge-base/claude_code/step4.png differ diff --git a/docs/images/knowledge-base/claude_code/step5.png b/docs/images/knowledge-base/claude_code/step5.png new file mode 100644 index 00000000..904ac6fc Binary files /dev/null and b/docs/images/knowledge-base/claude_code/step5.png differ diff --git a/docs/images/knowledge-base/claude_code/step6.png b/docs/images/knowledge-base/claude_code/step6.png new file mode 100644 index 00000000..4b1e1bfe Binary files /dev/null and b/docs/images/knowledge-base/claude_code/step6.png differ diff --git a/docs/images/knowledge-base/claude_code/step7.png b/docs/images/knowledge-base/claude_code/step7.png new file mode 100644 index 00000000..f1ffd751 Binary files /dev/null and b/docs/images/knowledge-base/claude_code/step7.png differ diff --git a/docs/images/knowledge-base/claude_code/step8.jpg b/docs/images/knowledge-base/claude_code/step8.jpg new file mode 100644 index 00000000..69018ad9 Binary files /dev/null and b/docs/images/knowledge-base/claude_code/step8.jpg differ diff --git a/docs/images/knowledge-base/claude_web_desktop/claude_web_desktop.png b/docs/images/knowledge-base/claude_web_desktop/claude_web_desktop.png new file mode 100644 index 00000000..63a961b6 Binary files /dev/null and b/docs/images/knowledge-base/claude_web_desktop/claude_web_desktop.png differ diff --git a/docs/images/knowledge-base/claude_web_desktop/step1.png b/docs/images/knowledge-base/claude_web_desktop/step1.png new file mode 100644 index 00000000..dea8be98 Binary files /dev/null and b/docs/images/knowledge-base/claude_web_desktop/step1.png differ diff --git a/docs/images/knowledge-base/claude_web_desktop/step2.png b/docs/images/knowledge-base/claude_web_desktop/step2.png new file mode 100644 index 00000000..8b3efabb Binary files /dev/null and b/docs/images/knowledge-base/claude_web_desktop/step2.png differ diff --git a/docs/images/knowledge-base/claude_web_desktop/step3.png b/docs/images/knowledge-base/claude_web_desktop/step3.png new file mode 100644 index 00000000..85e11a57 Binary files /dev/null and b/docs/images/knowledge-base/claude_web_desktop/step3.png differ diff --git a/docs/images/knowledge-base/claude_web_desktop/step4.png b/docs/images/knowledge-base/claude_web_desktop/step4.png new file mode 100644 index 00000000..83ff38e1 Binary files /dev/null and b/docs/images/knowledge-base/claude_web_desktop/step4.png differ diff --git a/docs/images/knowledge-base/claude_web_desktop/step5.png b/docs/images/knowledge-base/claude_web_desktop/step5.png new file mode 100644 index 00000000..9b53aad3 Binary files /dev/null and b/docs/images/knowledge-base/claude_web_desktop/step5.png differ diff --git a/docs/images/knowledge-base/claude_web_desktop/step6.png b/docs/images/knowledge-base/claude_web_desktop/step6.png new file mode 100644 index 00000000..dd75cc91 Binary files /dev/null and b/docs/images/knowledge-base/claude_web_desktop/step6.png differ diff --git a/docs/images/knowledge-base/claude_web_desktop/step7.png b/docs/images/knowledge-base/claude_web_desktop/step7.png new file mode 100644 index 00000000..61f10cd1 Binary files /dev/null and b/docs/images/knowledge-base/claude_web_desktop/step7.png differ diff --git a/docs/images/knowledge-base/claude_web_desktop/step8.jpg b/docs/images/knowledge-base/claude_web_desktop/step8.jpg new file mode 100644 index 00000000..cd526db7 Binary files /dev/null and b/docs/images/knowledge-base/claude_web_desktop/step8.jpg differ diff --git a/docs/images/knowledge-base/cline/Cline.png b/docs/images/knowledge-base/cline/Cline.png new file mode 100644 index 00000000..8d84f819 Binary files /dev/null and b/docs/images/knowledge-base/cline/Cline.png differ diff --git a/docs/images/knowledge-base/cline/step1.png b/docs/images/knowledge-base/cline/step1.png new file mode 100644 index 00000000..dea8be98 Binary files /dev/null and b/docs/images/knowledge-base/cline/step1.png differ diff --git a/docs/images/knowledge-base/cline/step2.png b/docs/images/knowledge-base/cline/step2.png new file mode 100644 index 00000000..8b3efabb Binary files /dev/null and b/docs/images/knowledge-base/cline/step2.png differ diff --git a/docs/images/knowledge-base/cline/step3.png b/docs/images/knowledge-base/cline/step3.png new file mode 100644 index 00000000..c2dbce7a Binary files /dev/null and b/docs/images/knowledge-base/cline/step3.png differ diff --git a/docs/images/knowledge-base/cline/step4.png b/docs/images/knowledge-base/cline/step4.png new file mode 100644 index 00000000..ed754669 Binary files /dev/null and b/docs/images/knowledge-base/cline/step4.png differ diff --git a/docs/images/knowledge-base/cline/step5.png b/docs/images/knowledge-base/cline/step5.png new file mode 100644 index 00000000..b8cd2367 Binary files /dev/null and b/docs/images/knowledge-base/cline/step5.png differ diff --git a/docs/images/knowledge-base/cline/step6.png b/docs/images/knowledge-base/cline/step6.png new file mode 100644 index 00000000..637a653f Binary files /dev/null and b/docs/images/knowledge-base/cline/step6.png differ diff --git a/docs/images/knowledge-base/cline/step7.png b/docs/images/knowledge-base/cline/step7.png new file mode 100644 index 00000000..2d8eb859 Binary files /dev/null and b/docs/images/knowledge-base/cline/step7.png differ diff --git a/docs/images/knowledge-base/cline/step8.png b/docs/images/knowledge-base/cline/step8.png new file mode 100644 index 00000000..0fd2f44d Binary files /dev/null and b/docs/images/knowledge-base/cline/step8.png differ diff --git a/docs/images/knowledge-base/cline/step9.jpg b/docs/images/knowledge-base/cline/step9.jpg new file mode 100644 index 00000000..dc3786ad Binary files /dev/null and b/docs/images/knowledge-base/cline/step9.jpg differ diff --git a/docs/images/knowledge-base/continue/Continue.png b/docs/images/knowledge-base/continue/Continue.png new file mode 100644 index 00000000..ed7ffa64 Binary files /dev/null and b/docs/images/knowledge-base/continue/Continue.png differ diff --git a/docs/images/knowledge-base/continue/step1.png b/docs/images/knowledge-base/continue/step1.png new file mode 100644 index 00000000..dea8be98 Binary files /dev/null and b/docs/images/knowledge-base/continue/step1.png differ diff --git a/docs/images/knowledge-base/continue/step2.png b/docs/images/knowledge-base/continue/step2.png new file mode 100644 index 00000000..8b3efabb Binary files /dev/null and b/docs/images/knowledge-base/continue/step2.png differ diff --git a/docs/images/knowledge-base/continue/step3.png b/docs/images/knowledge-base/continue/step3.png new file mode 100644 index 00000000..c2dbce7a Binary files /dev/null and b/docs/images/knowledge-base/continue/step3.png differ diff --git a/docs/images/knowledge-base/continue/step4.png b/docs/images/knowledge-base/continue/step4.png new file mode 100644 index 00000000..ed754669 Binary files /dev/null and b/docs/images/knowledge-base/continue/step4.png differ diff --git a/docs/images/knowledge-base/continue/step5.png b/docs/images/knowledge-base/continue/step5.png new file mode 100644 index 00000000..cb2daadf Binary files /dev/null and b/docs/images/knowledge-base/continue/step5.png differ diff --git a/docs/images/knowledge-base/continue/step6.png b/docs/images/knowledge-base/continue/step6.png new file mode 100644 index 00000000..de53a000 Binary files /dev/null and b/docs/images/knowledge-base/continue/step6.png differ diff --git a/docs/images/knowledge-base/continue/step7.png b/docs/images/knowledge-base/continue/step7.png new file mode 100644 index 00000000..bda92cb1 Binary files /dev/null and b/docs/images/knowledge-base/continue/step7.png differ diff --git a/docs/images/knowledge-base/continue/step8.png b/docs/images/knowledge-base/continue/step8.png new file mode 100644 index 00000000..55e6e27b Binary files /dev/null and b/docs/images/knowledge-base/continue/step8.png differ diff --git a/docs/images/knowledge-base/continue/step9.jpg b/docs/images/knowledge-base/continue/step9.jpg new file mode 100644 index 00000000..6bb27e14 Binary files /dev/null and b/docs/images/knowledge-base/continue/step9.jpg differ diff --git a/docs/images/knowledge-base/cursor/Cursor.png b/docs/images/knowledge-base/cursor/Cursor.png new file mode 100644 index 00000000..db660430 Binary files /dev/null and b/docs/images/knowledge-base/cursor/Cursor.png differ diff --git a/docs/images/knowledge-base/cursor/step1.png b/docs/images/knowledge-base/cursor/step1.png new file mode 100644 index 00000000..dea8be98 Binary files /dev/null and b/docs/images/knowledge-base/cursor/step1.png differ diff --git a/docs/images/knowledge-base/cursor/step2.png b/docs/images/knowledge-base/cursor/step2.png new file mode 100644 index 00000000..8b3efabb Binary files /dev/null and b/docs/images/knowledge-base/cursor/step2.png differ diff --git a/docs/images/knowledge-base/cursor/step3.png b/docs/images/knowledge-base/cursor/step3.png new file mode 100644 index 00000000..c2dbce7a Binary files /dev/null and b/docs/images/knowledge-base/cursor/step3.png differ diff --git a/docs/images/knowledge-base/cursor/step4.png b/docs/images/knowledge-base/cursor/step4.png new file mode 100644 index 00000000..d223ee87 Binary files /dev/null and b/docs/images/knowledge-base/cursor/step4.png differ diff --git a/docs/images/knowledge-base/cursor/step5.png b/docs/images/knowledge-base/cursor/step5.png new file mode 100644 index 00000000..dd6ab9d9 Binary files /dev/null and b/docs/images/knowledge-base/cursor/step5.png differ diff --git a/docs/images/knowledge-base/cursor/step6.png b/docs/images/knowledge-base/cursor/step6.png new file mode 100644 index 00000000..53b70fb3 Binary files /dev/null and b/docs/images/knowledge-base/cursor/step6.png differ diff --git a/docs/images/knowledge-base/cursor/step7.png b/docs/images/knowledge-base/cursor/step7.png new file mode 100644 index 00000000..49345c63 Binary files /dev/null and b/docs/images/knowledge-base/cursor/step7.png differ diff --git a/docs/images/knowledge-base/cursor/step8.png b/docs/images/knowledge-base/cursor/step8.png new file mode 100644 index 00000000..d3a6118f Binary files /dev/null and b/docs/images/knowledge-base/cursor/step8.png differ diff --git a/docs/images/knowledge-base/cursor/step9.jpg b/docs/images/knowledge-base/cursor/step9.jpg new file mode 100644 index 00000000..27c5c40d Binary files /dev/null and b/docs/images/knowledge-base/cursor/step9.jpg differ diff --git a/docs/images/knowledge-base/discord_oauth_app/discord_step1_login.png b/docs/images/knowledge-base/discord_oauth_app/discord_step1_login.png new file mode 100644 index 00000000..6cfe1a4e Binary files /dev/null and b/docs/images/knowledge-base/discord_oauth_app/discord_step1_login.png differ diff --git a/docs/images/knowledge-base/discord_oauth_app/discord_step2_create_new_app.png b/docs/images/knowledge-base/discord_oauth_app/discord_step2_create_new_app.png new file mode 100644 index 00000000..3a783da8 Binary files /dev/null and b/docs/images/knowledge-base/discord_oauth_app/discord_step2_create_new_app.png differ diff --git a/docs/images/knowledge-base/discord_oauth_app/discord_step3_app_info.png b/docs/images/knowledge-base/discord_oauth_app/discord_step3_app_info.png new file mode 100644 index 00000000..d20abf4e Binary files /dev/null and b/docs/images/knowledge-base/discord_oauth_app/discord_step3_app_info.png differ diff --git a/docs/images/knowledge-base/discord_oauth_app/discord_step4_get_discord_token.png b/docs/images/knowledge-base/discord_oauth_app/discord_step4_get_discord_token.png new file mode 100644 index 00000000..9d7bd6bb Binary files /dev/null and b/docs/images/knowledge-base/discord_oauth_app/discord_step4_get_discord_token.png differ diff --git a/docs/images/knowledge-base/discord_oauth_app/discord_step5_bot_permissions.png b/docs/images/knowledge-base/discord_oauth_app/discord_step5_bot_permissions.png new file mode 100644 index 00000000..76d1f673 Binary files /dev/null and b/docs/images/knowledge-base/discord_oauth_app/discord_step5_bot_permissions.png differ diff --git a/docs/images/knowledge-base/discord_oauth_app/discord_step6_oauth_settings.png b/docs/images/knowledge-base/discord_oauth_app/discord_step6_oauth_settings.png new file mode 100644 index 00000000..b80437d1 Binary files /dev/null and b/docs/images/knowledge-base/discord_oauth_app/discord_step6_oauth_settings.png differ diff --git a/docs/images/knowledge-base/discord_oauth_app/discord_step7_oauth2_url_generator.png b/docs/images/knowledge-base/discord_oauth_app/discord_step7_oauth2_url_generator.png new file mode 100644 index 00000000..f5ba0b0b Binary files /dev/null and b/docs/images/knowledge-base/discord_oauth_app/discord_step7_oauth2_url_generator.png differ diff --git a/docs/images/knowledge-base/discord_oauth_app/discord_step8_generate_oauth_url.png b/docs/images/knowledge-base/discord_oauth_app/discord_step8_generate_oauth_url.png new file mode 100644 index 00000000..37bd05a9 Binary files /dev/null and b/docs/images/knowledge-base/discord_oauth_app/discord_step8_generate_oauth_url.png differ diff --git a/docs/images/knowledge-base/discord_oauth_app/step9.png b/docs/images/knowledge-base/discord_oauth_app/step9.png new file mode 100644 index 00000000..f765dea3 Binary files /dev/null and b/docs/images/knowledge-base/discord_oauth_app/step9.png differ diff --git a/docs/images/knowledge-base/dropbox_oauth_app/step1_create_app.png b/docs/images/knowledge-base/dropbox_oauth_app/step1_create_app.png new file mode 100644 index 00000000..b925c3e9 Binary files /dev/null and b/docs/images/knowledge-base/dropbox_oauth_app/step1_create_app.png differ diff --git a/docs/images/knowledge-base/dropbox_oauth_app/step2_oauth_settings.png b/docs/images/knowledge-base/dropbox_oauth_app/step2_oauth_settings.png new file mode 100644 index 00000000..9c6e2d2b Binary files /dev/null and b/docs/images/knowledge-base/dropbox_oauth_app/step2_oauth_settings.png differ diff --git a/docs/images/knowledge-base/dropbox_oauth_app/step3_permissions.png b/docs/images/knowledge-base/dropbox_oauth_app/step3_permissions.png new file mode 100644 index 00000000..1c94ee0f Binary files /dev/null and b/docs/images/knowledge-base/dropbox_oauth_app/step3_permissions.png differ diff --git a/docs/images/knowledge-base/figma_oauth_app/figma_step1_login.png b/docs/images/knowledge-base/figma_oauth_app/figma_step1_login.png new file mode 100644 index 00000000..30f74495 Binary files /dev/null and b/docs/images/knowledge-base/figma_oauth_app/figma_step1_login.png differ diff --git a/docs/images/knowledge-base/figma_oauth_app/figma_step2_create_app.png b/docs/images/knowledge-base/figma_oauth_app/figma_step2_create_app.png new file mode 100644 index 00000000..806a3288 Binary files /dev/null and b/docs/images/knowledge-base/figma_oauth_app/figma_step2_create_app.png differ diff --git a/docs/images/knowledge-base/figma_oauth_app/figma_step3_client_id_secret_and_redirect_url.png b/docs/images/knowledge-base/figma_oauth_app/figma_step3_client_id_secret_and_redirect_url.png new file mode 100644 index 00000000..d07ac97b Binary files /dev/null and b/docs/images/knowledge-base/figma_oauth_app/figma_step3_client_id_secret_and_redirect_url.png differ diff --git a/docs/images/knowledge-base/gemini_cli/Gemini_CLI.png b/docs/images/knowledge-base/gemini_cli/Gemini_CLI.png new file mode 100644 index 00000000..2e26df7f Binary files /dev/null and b/docs/images/knowledge-base/gemini_cli/Gemini_CLI.png differ diff --git a/docs/images/knowledge-base/gemini_cli/step1.png b/docs/images/knowledge-base/gemini_cli/step1.png new file mode 100644 index 00000000..dea8be98 Binary files /dev/null and b/docs/images/knowledge-base/gemini_cli/step1.png differ diff --git a/docs/images/knowledge-base/gemini_cli/step2.png b/docs/images/knowledge-base/gemini_cli/step2.png new file mode 100644 index 00000000..8b3efabb Binary files /dev/null and b/docs/images/knowledge-base/gemini_cli/step2.png differ diff --git a/docs/images/knowledge-base/gemini_cli/step3.png b/docs/images/knowledge-base/gemini_cli/step3.png new file mode 100644 index 00000000..c2dbce7a Binary files /dev/null and b/docs/images/knowledge-base/gemini_cli/step3.png differ diff --git a/docs/images/knowledge-base/gemini_cli/step4.png b/docs/images/knowledge-base/gemini_cli/step4.png new file mode 100644 index 00000000..48f1b99a Binary files /dev/null and b/docs/images/knowledge-base/gemini_cli/step4.png differ diff --git a/docs/images/knowledge-base/gemini_cli/step5.png b/docs/images/knowledge-base/gemini_cli/step5.png new file mode 100644 index 00000000..e03f7fc7 Binary files /dev/null and b/docs/images/knowledge-base/gemini_cli/step5.png differ diff --git a/docs/images/knowledge-base/gemini_cli/step6.jpg b/docs/images/knowledge-base/gemini_cli/step6.jpg new file mode 100644 index 00000000..ad24c2da Binary files /dev/null and b/docs/images/knowledge-base/gemini_cli/step6.jpg differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/scope1_oauth.png b/docs/images/knowledge-base/gmail_oauth_app/scope1_oauth.png new file mode 100644 index 00000000..9274f1bf Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/scope1_oauth.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/scope2_data_access.png b/docs/images/knowledge-base/gmail_oauth_app/scope2_data_access.png new file mode 100644 index 00000000..b978d87a Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/scope2_data_access.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/scope3_add_scopes.png b/docs/images/knowledge-base/gmail_oauth_app/scope3_add_scopes.png new file mode 100644 index 00000000..cbb5f918 Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/scope3_add_scopes.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step10_info.png b/docs/images/knowledge-base/gmail_oauth_app/step10_info.png new file mode 100644 index 00000000..6eb9509b Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step10_info.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step11_CreateCredentials.png b/docs/images/knowledge-base/gmail_oauth_app/step11_CreateCredentials.png new file mode 100644 index 00000000..b60157cb Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step11_CreateCredentials.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step12_OAuthClientID.png b/docs/images/knowledge-base/gmail_oauth_app/step12_OAuthClientID.png new file mode 100644 index 00000000..f083c113 Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step12_OAuthClientID.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step13_WebApplication.png b/docs/images/knowledge-base/gmail_oauth_app/step13_WebApplication.png new file mode 100644 index 00000000..66efc323 Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step13_WebApplication.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step14_redirect_uri.png b/docs/images/knowledge-base/gmail_oauth_app/step14_redirect_uri.png new file mode 100644 index 00000000..fbaf9949 Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step14_redirect_uri.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step15_credentials.png b/docs/images/knowledge-base/gmail_oauth_app/step15_credentials.png new file mode 100644 index 00000000..aa1d161a Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step15_credentials.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step1_box.png b/docs/images/knowledge-base/gmail_oauth_app/step1_box.png new file mode 100644 index 00000000..9a506189 Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step1_box.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step2_new_project.png b/docs/images/knowledge-base/gmail_oauth_app/step2_new_project.png new file mode 100644 index 00000000..cde07c66 Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step2_new_project.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step3_create.png b/docs/images/knowledge-base/gmail_oauth_app/step3_create.png new file mode 100644 index 00000000..85cdd218 Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step3_create.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step4_select_project.png b/docs/images/knowledge-base/gmail_oauth_app/step4_select_project.png new file mode 100644 index 00000000..568f62f5 Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step4_select_project.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step5_apis_and_services.png b/docs/images/knowledge-base/gmail_oauth_app/step5_apis_and_services.png new file mode 100644 index 00000000..5911f07b Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step5_apis_and_services.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step6_enable_api_services.png b/docs/images/knowledge-base/gmail_oauth_app/step6_enable_api_services.png new file mode 100644 index 00000000..409a42d4 Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step6_enable_api_services.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step7_select_gmail.png b/docs/images/knowledge-base/gmail_oauth_app/step7_select_gmail.png new file mode 100644 index 00000000..faab1f3d Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step7_select_gmail.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step8_enable.png b/docs/images/knowledge-base/gmail_oauth_app/step8_enable.png new file mode 100644 index 00000000..00163848 Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step8_enable.png differ diff --git a/docs/images/knowledge-base/gmail_oauth_app/step9_oauth_screen.png b/docs/images/knowledge-base/gmail_oauth_app/step9_oauth_screen.png new file mode 100644 index 00000000..f4d82abf Binary files /dev/null and b/docs/images/knowledge-base/gmail_oauth_app/step9_oauth_screen.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/scope1_oauth.png b/docs/images/knowledge-base/google_calendar_oauth_app/scope1_oauth.png new file mode 100644 index 00000000..9274f1bf Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/scope1_oauth.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/scope2_data_access.png b/docs/images/knowledge-base/google_calendar_oauth_app/scope2_data_access.png new file mode 100644 index 00000000..b978d87a Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/scope2_data_access.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/scope3_add_scopes.png b/docs/images/knowledge-base/google_calendar_oauth_app/scope3_add_scopes.png new file mode 100644 index 00000000..cbb5f918 Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/scope3_add_scopes.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step10_info.png b/docs/images/knowledge-base/google_calendar_oauth_app/step10_info.png new file mode 100644 index 00000000..6eb9509b Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step10_info.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step11_CreateCredentials.png b/docs/images/knowledge-base/google_calendar_oauth_app/step11_CreateCredentials.png new file mode 100644 index 00000000..b60157cb Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step11_CreateCredentials.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step12_OAuthClientID.png b/docs/images/knowledge-base/google_calendar_oauth_app/step12_OAuthClientID.png new file mode 100644 index 00000000..f083c113 Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step12_OAuthClientID.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step13_WebApplication.png b/docs/images/knowledge-base/google_calendar_oauth_app/step13_WebApplication.png new file mode 100644 index 00000000..66efc323 Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step13_WebApplication.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step14_redirect_uri.png b/docs/images/knowledge-base/google_calendar_oauth_app/step14_redirect_uri.png new file mode 100644 index 00000000..fbaf9949 Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step14_redirect_uri.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step15_credentials.png b/docs/images/knowledge-base/google_calendar_oauth_app/step15_credentials.png new file mode 100644 index 00000000..aa1d161a Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step15_credentials.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step1_box.png b/docs/images/knowledge-base/google_calendar_oauth_app/step1_box.png new file mode 100644 index 00000000..9a506189 Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step1_box.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step2_new_project.png b/docs/images/knowledge-base/google_calendar_oauth_app/step2_new_project.png new file mode 100644 index 00000000..cde07c66 Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step2_new_project.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step3_create.png b/docs/images/knowledge-base/google_calendar_oauth_app/step3_create.png new file mode 100644 index 00000000..85cdd218 Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step3_create.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step4_select_project.png b/docs/images/knowledge-base/google_calendar_oauth_app/step4_select_project.png new file mode 100644 index 00000000..568f62f5 Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step4_select_project.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step5_apis_and_services.png b/docs/images/knowledge-base/google_calendar_oauth_app/step5_apis_and_services.png new file mode 100644 index 00000000..5911f07b Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step5_apis_and_services.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step6_enable_api_services.png b/docs/images/knowledge-base/google_calendar_oauth_app/step6_enable_api_services.png new file mode 100644 index 00000000..409a42d4 Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step6_enable_api_services.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step7_select_calendar.png b/docs/images/knowledge-base/google_calendar_oauth_app/step7_select_calendar.png new file mode 100644 index 00000000..b3fc6c1f Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step7_select_calendar.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step8_enable.png b/docs/images/knowledge-base/google_calendar_oauth_app/step8_enable.png new file mode 100644 index 00000000..100348eb Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step8_enable.png differ diff --git a/docs/images/knowledge-base/google_calendar_oauth_app/step9_oauth_screen.png b/docs/images/knowledge-base/google_calendar_oauth_app/step9_oauth_screen.png new file mode 100644 index 00000000..d87d17dd Binary files /dev/null and b/docs/images/knowledge-base/google_calendar_oauth_app/step9_oauth_screen.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/scope1_oauth.png b/docs/images/knowledge-base/google_docs_oauth_app/scope1_oauth.png new file mode 100644 index 00000000..9274f1bf Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/scope1_oauth.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/scope2_data_access.png b/docs/images/knowledge-base/google_docs_oauth_app/scope2_data_access.png new file mode 100644 index 00000000..b978d87a Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/scope2_data_access.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/scope3_add_scopes.png b/docs/images/knowledge-base/google_docs_oauth_app/scope3_add_scopes.png new file mode 100644 index 00000000..cbb5f918 Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/scope3_add_scopes.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step10_info.png b/docs/images/knowledge-base/google_docs_oauth_app/step10_info.png new file mode 100644 index 00000000..6eb9509b Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step10_info.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step11_CreateCredentials.png b/docs/images/knowledge-base/google_docs_oauth_app/step11_CreateCredentials.png new file mode 100644 index 00000000..b60157cb Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step11_CreateCredentials.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step12_OAuthClientID.png b/docs/images/knowledge-base/google_docs_oauth_app/step12_OAuthClientID.png new file mode 100644 index 00000000..f083c113 Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step12_OAuthClientID.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step13_WebApplication.png b/docs/images/knowledge-base/google_docs_oauth_app/step13_WebApplication.png new file mode 100644 index 00000000..66efc323 Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step13_WebApplication.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step14_redirect_uri.png b/docs/images/knowledge-base/google_docs_oauth_app/step14_redirect_uri.png new file mode 100644 index 00000000..0992cd39 Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step14_redirect_uri.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step15_credentials.png b/docs/images/knowledge-base/google_docs_oauth_app/step15_credentials.png new file mode 100644 index 00000000..aa1d161a Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step15_credentials.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step1_box.png b/docs/images/knowledge-base/google_docs_oauth_app/step1_box.png new file mode 100644 index 00000000..9a506189 Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step1_box.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step2_new_project.png b/docs/images/knowledge-base/google_docs_oauth_app/step2_new_project.png new file mode 100644 index 00000000..cde07c66 Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step2_new_project.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step3_create.png b/docs/images/knowledge-base/google_docs_oauth_app/step3_create.png new file mode 100644 index 00000000..85cdd218 Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step3_create.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step4_select_project.png b/docs/images/knowledge-base/google_docs_oauth_app/step4_select_project.png new file mode 100644 index 00000000..568f62f5 Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step4_select_project.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step5_apis_and_services.png b/docs/images/knowledge-base/google_docs_oauth_app/step5_apis_and_services.png new file mode 100644 index 00000000..5911f07b Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step5_apis_and_services.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step6_enable_api_services.png b/docs/images/knowledge-base/google_docs_oauth_app/step6_enable_api_services.png new file mode 100644 index 00000000..409a42d4 Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step6_enable_api_services.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step7_select_docs.png b/docs/images/knowledge-base/google_docs_oauth_app/step7_select_docs.png new file mode 100644 index 00000000..fb7517e5 Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step7_select_docs.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step8_enable.png b/docs/images/knowledge-base/google_docs_oauth_app/step8_enable.png new file mode 100644 index 00000000..c8c891ad Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step8_enable.png differ diff --git a/docs/images/knowledge-base/google_docs_oauth_app/step9_oauth_screen.png b/docs/images/knowledge-base/google_docs_oauth_app/step9_oauth_screen.png new file mode 100644 index 00000000..32b9d2c4 Binary files /dev/null and b/docs/images/knowledge-base/google_docs_oauth_app/step9_oauth_screen.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/scope1_oauth.png b/docs/images/knowledge-base/google_drive_oauth_app/scope1_oauth.png new file mode 100644 index 00000000..9274f1bf Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/scope1_oauth.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/scope2_data_access.png b/docs/images/knowledge-base/google_drive_oauth_app/scope2_data_access.png new file mode 100644 index 00000000..b978d87a Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/scope2_data_access.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/scope3_add_scopes.png b/docs/images/knowledge-base/google_drive_oauth_app/scope3_add_scopes.png new file mode 100644 index 00000000..cbb5f918 Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/scope3_add_scopes.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step10_info.png b/docs/images/knowledge-base/google_drive_oauth_app/step10_info.png new file mode 100644 index 00000000..6eb9509b Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step10_info.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step11_CreateCredentials.png b/docs/images/knowledge-base/google_drive_oauth_app/step11_CreateCredentials.png new file mode 100644 index 00000000..b60157cb Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step11_CreateCredentials.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step12_OAuthClientID.png b/docs/images/knowledge-base/google_drive_oauth_app/step12_OAuthClientID.png new file mode 100644 index 00000000..f083c113 Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step12_OAuthClientID.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step13_WebApplication.png b/docs/images/knowledge-base/google_drive_oauth_app/step13_WebApplication.png new file mode 100644 index 00000000..66efc323 Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step13_WebApplication.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step14_redirect_uri.png b/docs/images/knowledge-base/google_drive_oauth_app/step14_redirect_uri.png new file mode 100644 index 00000000..816da130 Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step14_redirect_uri.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step15_credentials.png b/docs/images/knowledge-base/google_drive_oauth_app/step15_credentials.png new file mode 100644 index 00000000..aa1d161a Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step15_credentials.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step1_box.png b/docs/images/knowledge-base/google_drive_oauth_app/step1_box.png new file mode 100644 index 00000000..9a506189 Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step1_box.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step2_new_project.png b/docs/images/knowledge-base/google_drive_oauth_app/step2_new_project.png new file mode 100644 index 00000000..cde07c66 Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step2_new_project.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step3_create.png b/docs/images/knowledge-base/google_drive_oauth_app/step3_create.png new file mode 100644 index 00000000..85cdd218 Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step3_create.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step4_select_project.png b/docs/images/knowledge-base/google_drive_oauth_app/step4_select_project.png new file mode 100644 index 00000000..568f62f5 Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step4_select_project.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step5_apis_and_services.png b/docs/images/knowledge-base/google_drive_oauth_app/step5_apis_and_services.png new file mode 100644 index 00000000..5911f07b Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step5_apis_and_services.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step6_enable_api_services.png b/docs/images/knowledge-base/google_drive_oauth_app/step6_enable_api_services.png new file mode 100644 index 00000000..409a42d4 Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step6_enable_api_services.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step7_select_drive.png b/docs/images/knowledge-base/google_drive_oauth_app/step7_select_drive.png new file mode 100644 index 00000000..f5c56fde Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step7_select_drive.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step8_enable.png b/docs/images/knowledge-base/google_drive_oauth_app/step8_enable.png new file mode 100644 index 00000000..ac1aab25 Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step8_enable.png differ diff --git a/docs/images/knowledge-base/google_drive_oauth_app/step9_oauth_screen.png b/docs/images/knowledge-base/google_drive_oauth_app/step9_oauth_screen.png new file mode 100644 index 00000000..3103f8d5 Binary files /dev/null and b/docs/images/knowledge-base/google_drive_oauth_app/step9_oauth_screen.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/scope1_oauth.png b/docs/images/knowledge-base/google_sheets_oauth_app/scope1_oauth.png new file mode 100644 index 00000000..9274f1bf Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/scope1_oauth.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/scope2_data_access.png b/docs/images/knowledge-base/google_sheets_oauth_app/scope2_data_access.png new file mode 100644 index 00000000..b978d87a Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/scope2_data_access.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/scope3_add_scopes.png b/docs/images/knowledge-base/google_sheets_oauth_app/scope3_add_scopes.png new file mode 100644 index 00000000..cbb5f918 Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/scope3_add_scopes.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step10_info.png b/docs/images/knowledge-base/google_sheets_oauth_app/step10_info.png new file mode 100644 index 00000000..6eb9509b Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step10_info.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step11_CreateCredentials.png b/docs/images/knowledge-base/google_sheets_oauth_app/step11_CreateCredentials.png new file mode 100644 index 00000000..b60157cb Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step11_CreateCredentials.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step12_OAuthClientID.png b/docs/images/knowledge-base/google_sheets_oauth_app/step12_OAuthClientID.png new file mode 100644 index 00000000..f083c113 Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step12_OAuthClientID.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step13_WebApplication.png b/docs/images/knowledge-base/google_sheets_oauth_app/step13_WebApplication.png new file mode 100644 index 00000000..66efc323 Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step13_WebApplication.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step14_redirect_uri.png b/docs/images/knowledge-base/google_sheets_oauth_app/step14_redirect_uri.png new file mode 100644 index 00000000..d84c0be4 Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step14_redirect_uri.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step15_credentials.png b/docs/images/knowledge-base/google_sheets_oauth_app/step15_credentials.png new file mode 100644 index 00000000..aa1d161a Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step15_credentials.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step1_box.png b/docs/images/knowledge-base/google_sheets_oauth_app/step1_box.png new file mode 100644 index 00000000..9a506189 Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step1_box.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step2_new_project.png b/docs/images/knowledge-base/google_sheets_oauth_app/step2_new_project.png new file mode 100644 index 00000000..cde07c66 Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step2_new_project.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step3_create.png b/docs/images/knowledge-base/google_sheets_oauth_app/step3_create.png new file mode 100644 index 00000000..85cdd218 Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step3_create.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step4_select_project.png b/docs/images/knowledge-base/google_sheets_oauth_app/step4_select_project.png new file mode 100644 index 00000000..568f62f5 Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step4_select_project.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step5_apis_and_services.png b/docs/images/knowledge-base/google_sheets_oauth_app/step5_apis_and_services.png new file mode 100644 index 00000000..5911f07b Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step5_apis_and_services.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step6_enable_api_services.png b/docs/images/knowledge-base/google_sheets_oauth_app/step6_enable_api_services.png new file mode 100644 index 00000000..409a42d4 Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step6_enable_api_services.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step7_select_sheets.png b/docs/images/knowledge-base/google_sheets_oauth_app/step7_select_sheets.png new file mode 100644 index 00000000..66bfc5d0 Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step7_select_sheets.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step8_enable.png b/docs/images/knowledge-base/google_sheets_oauth_app/step8_enable.png new file mode 100644 index 00000000..2643c919 Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step8_enable.png differ diff --git a/docs/images/knowledge-base/google_sheets_oauth_app/step9_oauth_screen.png b/docs/images/knowledge-base/google_sheets_oauth_app/step9_oauth_screen.png new file mode 100644 index 00000000..ce72d5a2 Binary files /dev/null and b/docs/images/knowledge-base/google_sheets_oauth_app/step9_oauth_screen.png differ diff --git a/docs/images/knowledge-base/hubspot_oauth/step1_app.png b/docs/images/knowledge-base/hubspot_oauth/step1_app.png new file mode 100644 index 00000000..eb7a013b Binary files /dev/null and b/docs/images/knowledge-base/hubspot_oauth/step1_app.png differ diff --git a/docs/images/knowledge-base/hubspot_oauth/step2_oauth.png b/docs/images/knowledge-base/hubspot_oauth/step2_oauth.png new file mode 100644 index 00000000..0cb01f3a Binary files /dev/null and b/docs/images/knowledge-base/hubspot_oauth/step2_oauth.png differ diff --git a/docs/images/knowledge-base/hubspot_oauth/step3_scopes.png b/docs/images/knowledge-base/hubspot_oauth/step3_scopes.png new file mode 100644 index 00000000..fd735259 Binary files /dev/null and b/docs/images/knowledge-base/hubspot_oauth/step3_scopes.png differ diff --git a/docs/images/knowledge-base/kiro/Kiro.png b/docs/images/knowledge-base/kiro/Kiro.png new file mode 100644 index 00000000..c55af328 Binary files /dev/null and b/docs/images/knowledge-base/kiro/Kiro.png differ diff --git a/docs/images/knowledge-base/kiro/step1.png b/docs/images/knowledge-base/kiro/step1.png new file mode 100644 index 00000000..dea8be98 Binary files /dev/null and b/docs/images/knowledge-base/kiro/step1.png differ diff --git a/docs/images/knowledge-base/kiro/step2.png b/docs/images/knowledge-base/kiro/step2.png new file mode 100644 index 00000000..8b3efabb Binary files /dev/null and b/docs/images/knowledge-base/kiro/step2.png differ diff --git a/docs/images/knowledge-base/kiro/step3.png b/docs/images/knowledge-base/kiro/step3.png new file mode 100644 index 00000000..c2dbce7a Binary files /dev/null and b/docs/images/knowledge-base/kiro/step3.png differ diff --git a/docs/images/knowledge-base/kiro/step4.png b/docs/images/knowledge-base/kiro/step4.png new file mode 100644 index 00000000..ed754669 Binary files /dev/null and b/docs/images/knowledge-base/kiro/step4.png differ diff --git a/docs/images/knowledge-base/kiro/step5.png b/docs/images/knowledge-base/kiro/step5.png new file mode 100644 index 00000000..61e784c2 Binary files /dev/null and b/docs/images/knowledge-base/kiro/step5.png differ diff --git a/docs/images/knowledge-base/kiro/step6.png b/docs/images/knowledge-base/kiro/step6.png new file mode 100644 index 00000000..07f185c3 Binary files /dev/null and b/docs/images/knowledge-base/kiro/step6.png differ diff --git a/docs/images/knowledge-base/kiro/step7.jpg b/docs/images/knowledge-base/kiro/step7.jpg new file mode 100644 index 00000000..454d3605 Binary files /dev/null and b/docs/images/knowledge-base/kiro/step7.jpg differ diff --git a/docs/images/knowledge-base/linkedin_oauth_app/step1.png b/docs/images/knowledge-base/linkedin_oauth_app/step1.png new file mode 100644 index 00000000..754efd3f Binary files /dev/null and b/docs/images/knowledge-base/linkedin_oauth_app/step1.png differ diff --git a/docs/images/knowledge-base/linkedin_oauth_app/step2.png b/docs/images/knowledge-base/linkedin_oauth_app/step2.png new file mode 100644 index 00000000..efa78b4f Binary files /dev/null and b/docs/images/knowledge-base/linkedin_oauth_app/step2.png differ diff --git a/docs/images/knowledge-base/linkedin_oauth_app/step3.png b/docs/images/knowledge-base/linkedin_oauth_app/step3.png new file mode 100644 index 00000000..0fd723f9 Binary files /dev/null and b/docs/images/knowledge-base/linkedin_oauth_app/step3.png differ diff --git a/docs/images/knowledge-base/monday_oauth_app/monday_step1_create.png b/docs/images/knowledge-base/monday_oauth_app/monday_step1_create.png new file mode 100644 index 00000000..68e80cbd Binary files /dev/null and b/docs/images/knowledge-base/monday_oauth_app/monday_step1_create.png differ diff --git a/docs/images/knowledge-base/monday_oauth_app/monday_step1_cred.png b/docs/images/knowledge-base/monday_oauth_app/monday_step1_cred.png new file mode 100644 index 00000000..c58dfc21 Binary files /dev/null and b/docs/images/knowledge-base/monday_oauth_app/monday_step1_cred.png differ diff --git a/docs/images/knowledge-base/monday_oauth_app/monday_step2_redirect.png b/docs/images/knowledge-base/monday_oauth_app/monday_step2_redirect.png new file mode 100644 index 00000000..d86eb5d0 Binary files /dev/null and b/docs/images/knowledge-base/monday_oauth_app/monday_step2_redirect.png differ diff --git a/docs/images/knowledge-base/monday_oauth_app/monday_step2_scope.png b/docs/images/knowledge-base/monday_oauth_app/monday_step2_scope.png new file mode 100644 index 00000000..d1212bc5 Binary files /dev/null and b/docs/images/knowledge-base/monday_oauth_app/monday_step2_scope.png differ diff --git a/docs/images/knowledge-base/monday_oauth_app/monday_step3_live.png b/docs/images/knowledge-base/monday_oauth_app/monday_step3_live.png new file mode 100644 index 00000000..a0f47113 Binary files /dev/null and b/docs/images/knowledge-base/monday_oauth_app/monday_step3_live.png differ diff --git a/docs/images/knowledge-base/moneybird_oauth_app/moneybird_step1_1_callbackurl.png b/docs/images/knowledge-base/moneybird_oauth_app/moneybird_step1_1_callbackurl.png new file mode 100644 index 00000000..a2005140 Binary files /dev/null and b/docs/images/knowledge-base/moneybird_oauth_app/moneybird_step1_1_callbackurl.png differ diff --git a/docs/images/knowledge-base/moneybird_oauth_app/moneybird_step1_2_client.png b/docs/images/knowledge-base/moneybird_oauth_app/moneybird_step1_2_client.png new file mode 100644 index 00000000..e2b81492 Binary files /dev/null and b/docs/images/knowledge-base/moneybird_oauth_app/moneybird_step1_2_client.png differ diff --git a/docs/images/knowledge-base/n8n/n8n.png b/docs/images/knowledge-base/n8n/n8n.png new file mode 100644 index 00000000..ac53da30 Binary files /dev/null and b/docs/images/knowledge-base/n8n/n8n.png differ diff --git a/docs/images/knowledge-base/n8n/step1.png b/docs/images/knowledge-base/n8n/step1.png new file mode 100644 index 00000000..dea8be98 Binary files /dev/null and b/docs/images/knowledge-base/n8n/step1.png differ diff --git a/docs/images/knowledge-base/n8n/step10.png b/docs/images/knowledge-base/n8n/step10.png new file mode 100644 index 00000000..b42a983c Binary files /dev/null and b/docs/images/knowledge-base/n8n/step10.png differ diff --git a/docs/images/knowledge-base/n8n/step11.png b/docs/images/knowledge-base/n8n/step11.png new file mode 100644 index 00000000..99bcf0dd Binary files /dev/null and b/docs/images/knowledge-base/n8n/step11.png differ diff --git a/docs/images/knowledge-base/n8n/step12.png b/docs/images/knowledge-base/n8n/step12.png new file mode 100644 index 00000000..f8bfd314 Binary files /dev/null and b/docs/images/knowledge-base/n8n/step12.png differ diff --git a/docs/images/knowledge-base/n8n/step13.png b/docs/images/knowledge-base/n8n/step13.png new file mode 100644 index 00000000..9ffcf235 Binary files /dev/null and b/docs/images/knowledge-base/n8n/step13.png differ diff --git a/docs/images/knowledge-base/n8n/step14.png b/docs/images/knowledge-base/n8n/step14.png new file mode 100644 index 00000000..9fd2f88b Binary files /dev/null and b/docs/images/knowledge-base/n8n/step14.png differ diff --git a/docs/images/knowledge-base/n8n/step15.png b/docs/images/knowledge-base/n8n/step15.png new file mode 100644 index 00000000..7a03ee30 Binary files /dev/null and b/docs/images/knowledge-base/n8n/step15.png differ diff --git a/docs/images/knowledge-base/n8n/step2.png b/docs/images/knowledge-base/n8n/step2.png new file mode 100644 index 00000000..8b3efabb Binary files /dev/null and b/docs/images/knowledge-base/n8n/step2.png differ diff --git a/docs/images/knowledge-base/n8n/step3.png b/docs/images/knowledge-base/n8n/step3.png new file mode 100644 index 00000000..c2dbce7a Binary files /dev/null and b/docs/images/knowledge-base/n8n/step3.png differ diff --git a/docs/images/knowledge-base/n8n/step4.png b/docs/images/knowledge-base/n8n/step4.png new file mode 100644 index 00000000..87e6cbd5 Binary files /dev/null and b/docs/images/knowledge-base/n8n/step4.png differ diff --git a/docs/images/knowledge-base/n8n/step5.png b/docs/images/knowledge-base/n8n/step5.png new file mode 100644 index 00000000..ec833ade Binary files /dev/null and b/docs/images/knowledge-base/n8n/step5.png differ diff --git a/docs/images/knowledge-base/n8n/step6.png b/docs/images/knowledge-base/n8n/step6.png new file mode 100644 index 00000000..90f1a028 Binary files /dev/null and b/docs/images/knowledge-base/n8n/step6.png differ diff --git a/docs/images/knowledge-base/n8n/step7.png b/docs/images/knowledge-base/n8n/step7.png new file mode 100644 index 00000000..291e7fa4 Binary files /dev/null and b/docs/images/knowledge-base/n8n/step7.png differ diff --git a/docs/images/knowledge-base/n8n/step8.png b/docs/images/knowledge-base/n8n/step8.png new file mode 100644 index 00000000..144dbc02 Binary files /dev/null and b/docs/images/knowledge-base/n8n/step8.png differ diff --git a/docs/images/knowledge-base/n8n/step9.png b/docs/images/knowledge-base/n8n/step9.png new file mode 100644 index 00000000..a1c60e5a Binary files /dev/null and b/docs/images/knowledge-base/n8n/step9.png differ diff --git a/docs/images/knowledge-base/onboarding/gmail_oauth.png b/docs/images/knowledge-base/onboarding/gmail_oauth.png new file mode 100644 index 00000000..235bc4c2 Binary files /dev/null and b/docs/images/knowledge-base/onboarding/gmail_oauth.png differ diff --git a/docs/images/knowledge-base/onboarding/klavis_manage_instances.png b/docs/images/knowledge-base/onboarding/klavis_manage_instances.png new file mode 100644 index 00000000..6afc0aa9 Binary files /dev/null and b/docs/images/knowledge-base/onboarding/klavis_manage_instances.png differ diff --git a/docs/images/knowledge-base/onboarding/step1_choose_mcp_server.png b/docs/images/knowledge-base/onboarding/step1_choose_mcp_server.png new file mode 100644 index 00000000..95a43e6b Binary files /dev/null and b/docs/images/knowledge-base/onboarding/step1_choose_mcp_server.png differ diff --git a/docs/images/knowledge-base/onboarding/step2_config_mcp_Server.png b/docs/images/knowledge-base/onboarding/step2_config_mcp_Server.png new file mode 100644 index 00000000..3cdcc316 Binary files /dev/null and b/docs/images/knowledge-base/onboarding/step2_config_mcp_Server.png differ diff --git a/docs/images/knowledge-base/onedrive_oauth_app/onedrive_step1_register.png b/docs/images/knowledge-base/onedrive_oauth_app/onedrive_step1_register.png new file mode 100644 index 00000000..363285be Binary files /dev/null and b/docs/images/knowledge-base/onedrive_oauth_app/onedrive_step1_register.png differ diff --git a/docs/images/knowledge-base/onedrive_oauth_app/onedrive_step2_scopes.png b/docs/images/knowledge-base/onedrive_oauth_app/onedrive_step2_scopes.png new file mode 100644 index 00000000..5ef91ad8 Binary files /dev/null and b/docs/images/knowledge-base/onedrive_oauth_app/onedrive_step2_scopes.png differ diff --git a/docs/images/knowledge-base/onedrive_oauth_app/onedrive_step3_clientID.png b/docs/images/knowledge-base/onedrive_oauth_app/onedrive_step3_clientID.png new file mode 100644 index 00000000..bafd10da Binary files /dev/null and b/docs/images/knowledge-base/onedrive_oauth_app/onedrive_step3_clientID.png differ diff --git a/docs/images/knowledge-base/onedrive_oauth_app/onedrive_step3_secret.png b/docs/images/knowledge-base/onedrive_oauth_app/onedrive_step3_secret.png new file mode 100644 index 00000000..2fa81c09 Binary files /dev/null and b/docs/images/knowledge-base/onedrive_oauth_app/onedrive_step3_secret.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step10_create_app_dialog.png b/docs/images/knowledge-base/quickbooks_oauth_app/step10_create_app_dialog.png new file mode 100644 index 00000000..5ec658aa Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step10_create_app_dialog.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step11_app_basic_info.png b/docs/images/knowledge-base/quickbooks_oauth_app/step11_app_basic_info.png new file mode 100644 index 00000000..f84466bf Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step11_app_basic_info.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step13_permissions_setup.png b/docs/images/knowledge-base/quickbooks_oauth_app/step13_permissions_setup.png new file mode 100644 index 00000000..79290c69 Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step13_permissions_setup.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step14_permissions_confirmed.png b/docs/images/knowledge-base/quickbooks_oauth_app/step14_permissions_confirmed.png new file mode 100644 index 00000000..aa2d3e62 Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step14_permissions_confirmed.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step15_app_created_success.png b/docs/images/knowledge-base/quickbooks_oauth_app/step15_app_created_success.png new file mode 100644 index 00000000..5b76b981 Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step15_app_created_success.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step16_keys_and_credentials.png b/docs/images/knowledge-base/quickbooks_oauth_app/step16_keys_and_credentials.png new file mode 100644 index 00000000..84eb79e0 Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step16_keys_and_credentials.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step17_test_api_playground.png b/docs/images/knowledge-base/quickbooks_oauth_app/step17_test_api_playground.png new file mode 100644 index 00000000..7065a873 Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step17_test_api_playground.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step1_homepage.png b/docs/images/knowledge-base/quickbooks_oauth_app/step1_homepage.png new file mode 100644 index 00000000..1392c2cc Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step1_homepage.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step2_workspaces.png b/docs/images/knowledge-base/quickbooks_oauth_app/step2_workspaces.png new file mode 100644 index 00000000..7b260583 Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step2_workspaces.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step3_create_workspace_dialog.png b/docs/images/knowledge-base/quickbooks_oauth_app/step3_create_workspace_dialog.png new file mode 100644 index 00000000..be18ba83 Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step3_create_workspace_dialog.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step4_create_workspace_form.png b/docs/images/knowledge-base/quickbooks_oauth_app/step4_create_workspace_form.png new file mode 100644 index 00000000..c4481f56 Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step4_create_workspace_form.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step5-company-info-form.png b/docs/images/knowledge-base/quickbooks_oauth_app/step5-company-info-form.png new file mode 100644 index 00000000..f06663a1 Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step5-company-info-form.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step7-contact-info-form.png b/docs/images/knowledge-base/quickbooks_oauth_app/step7-contact-info-form.png new file mode 100644 index 00000000..8b0c2182 Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step7-contact-info-form.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step8-contact-info-filled.png b/docs/images/knowledge-base/quickbooks_oauth_app/step8-contact-info-filled.png new file mode 100644 index 00000000..68cc0291 Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step8-contact-info-filled.png differ diff --git a/docs/images/knowledge-base/quickbooks_oauth_app/step9_workspace_dashboard.png b/docs/images/knowledge-base/quickbooks_oauth_app/step9_workspace_dashboard.png new file mode 100644 index 00000000..39c48c79 Binary files /dev/null and b/docs/images/knowledge-base/quickbooks_oauth_app/step9_workspace_dashboard.png differ diff --git a/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step1_login.png b/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step1_login.png new file mode 100644 index 00000000..1a606184 Binary files /dev/null and b/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step1_login.png differ diff --git a/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step2_create_app.png b/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step2_create_app.png new file mode 100644 index 00000000..a7d76318 Binary files /dev/null and b/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step2_create_app.png differ diff --git a/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step3_basic_info.png b/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step3_basic_info.png new file mode 100644 index 00000000..37255dd8 Binary files /dev/null and b/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step3_basic_info.png differ diff --git a/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step4_view_connected_apps.png b/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step4_view_connected_apps.png new file mode 100644 index 00000000..ba9fd305 Binary files /dev/null and b/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step4_view_connected_apps.png differ diff --git a/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step4_view_consumer_key_and_secret.png b/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step4_view_consumer_key_and_secret.png new file mode 100644 index 00000000..e18a8eb8 Binary files /dev/null and b/docs/images/knowledge-base/salesforce_oauth_app/salesforce_step4_view_consumer_key_and_secret.png differ diff --git a/docs/images/knowledge-base/slack_oauth_app/step1_AppCreation.png b/docs/images/knowledge-base/slack_oauth_app/step1_AppCreation.png new file mode 100644 index 00000000..85552b5c Binary files /dev/null and b/docs/images/knowledge-base/slack_oauth_app/step1_AppCreation.png differ diff --git a/docs/images/knowledge-base/slack_oauth_app/step2_redirect.png b/docs/images/knowledge-base/slack_oauth_app/step2_redirect.png new file mode 100644 index 00000000..68fc4336 Binary files /dev/null and b/docs/images/knowledge-base/slack_oauth_app/step2_redirect.png differ diff --git a/docs/images/knowledge-base/slack_oauth_app/step3_scopes.png b/docs/images/knowledge-base/slack_oauth_app/step3_scopes.png new file mode 100644 index 00000000..b0bd35ae Binary files /dev/null and b/docs/images/knowledge-base/slack_oauth_app/step3_scopes.png differ diff --git a/docs/images/knowledge-base/slack_oauth_app/step4_distribution.png b/docs/images/knowledge-base/slack_oauth_app/step4_distribution.png new file mode 100644 index 00000000..e78d47ab Binary files /dev/null and b/docs/images/knowledge-base/slack_oauth_app/step4_distribution.png differ diff --git a/docs/images/knowledge-base/vs_code/VS_Code.png b/docs/images/knowledge-base/vs_code/VS_Code.png new file mode 100644 index 00000000..5e24f325 Binary files /dev/null and b/docs/images/knowledge-base/vs_code/VS_Code.png differ diff --git a/docs/images/knowledge-base/vs_code/step1.png b/docs/images/knowledge-base/vs_code/step1.png new file mode 100644 index 00000000..dea8be98 Binary files /dev/null and b/docs/images/knowledge-base/vs_code/step1.png differ diff --git a/docs/images/knowledge-base/vs_code/step2.png b/docs/images/knowledge-base/vs_code/step2.png new file mode 100644 index 00000000..8b3efabb Binary files /dev/null and b/docs/images/knowledge-base/vs_code/step2.png differ diff --git a/docs/images/knowledge-base/vs_code/step3.png b/docs/images/knowledge-base/vs_code/step3.png new file mode 100644 index 00000000..c2dbce7a Binary files /dev/null and b/docs/images/knowledge-base/vs_code/step3.png differ diff --git a/docs/images/knowledge-base/vs_code/step4.png b/docs/images/knowledge-base/vs_code/step4.png new file mode 100644 index 00000000..8b3bdd4f Binary files /dev/null and b/docs/images/knowledge-base/vs_code/step4.png differ diff --git a/docs/images/knowledge-base/vs_code/step5.png b/docs/images/knowledge-base/vs_code/step5.png new file mode 100644 index 00000000..78b0bf07 Binary files /dev/null and b/docs/images/knowledge-base/vs_code/step5.png differ diff --git a/docs/images/knowledge-base/vs_code/step6.png b/docs/images/knowledge-base/vs_code/step6.png new file mode 100644 index 00000000..178d2039 Binary files /dev/null and b/docs/images/knowledge-base/vs_code/step6.png differ diff --git a/docs/images/knowledge-base/vs_code/step7.png b/docs/images/knowledge-base/vs_code/step7.png new file mode 100644 index 00000000..67ec860e Binary files /dev/null and b/docs/images/knowledge-base/vs_code/step7.png differ diff --git a/docs/images/knowledge-base/vs_code/step8.png b/docs/images/knowledge-base/vs_code/step8.png new file mode 100644 index 00000000..28dd2405 Binary files /dev/null and b/docs/images/knowledge-base/vs_code/step8.png differ diff --git a/docs/images/knowledge-base/vs_code/step9.jpg b/docs/images/knowledge-base/vs_code/step9.jpg new file mode 100644 index 00000000..fe74825e Binary files /dev/null and b/docs/images/knowledge-base/vs_code/step9.jpg differ diff --git a/docs/images/knowledge-base/windsurf/step1.png b/docs/images/knowledge-base/windsurf/step1.png new file mode 100644 index 00000000..dea8be98 Binary files /dev/null and b/docs/images/knowledge-base/windsurf/step1.png differ diff --git a/docs/images/knowledge-base/windsurf/step10.JPG b/docs/images/knowledge-base/windsurf/step10.JPG new file mode 100644 index 00000000..207b6f65 Binary files /dev/null and b/docs/images/knowledge-base/windsurf/step10.JPG differ diff --git a/docs/images/knowledge-base/windsurf/step2.png b/docs/images/knowledge-base/windsurf/step2.png new file mode 100644 index 00000000..8b3efabb Binary files /dev/null and b/docs/images/knowledge-base/windsurf/step2.png differ diff --git a/docs/images/knowledge-base/windsurf/step3.png b/docs/images/knowledge-base/windsurf/step3.png new file mode 100644 index 00000000..c2dbce7a Binary files /dev/null and b/docs/images/knowledge-base/windsurf/step3.png differ diff --git a/docs/images/knowledge-base/windsurf/step4.png b/docs/images/knowledge-base/windsurf/step4.png new file mode 100644 index 00000000..ed754669 Binary files /dev/null and b/docs/images/knowledge-base/windsurf/step4.png differ diff --git a/docs/images/knowledge-base/windsurf/step5.png b/docs/images/knowledge-base/windsurf/step5.png new file mode 100644 index 00000000..3ceb4cfd Binary files /dev/null and b/docs/images/knowledge-base/windsurf/step5.png differ diff --git a/docs/images/knowledge-base/windsurf/step6.png b/docs/images/knowledge-base/windsurf/step6.png new file mode 100644 index 00000000..c7e65fec Binary files /dev/null and b/docs/images/knowledge-base/windsurf/step6.png differ diff --git a/docs/images/knowledge-base/windsurf/step7.png b/docs/images/knowledge-base/windsurf/step7.png new file mode 100644 index 00000000..5297488f Binary files /dev/null and b/docs/images/knowledge-base/windsurf/step7.png differ diff --git a/docs/images/knowledge-base/windsurf/step8.png b/docs/images/knowledge-base/windsurf/step8.png new file mode 100644 index 00000000..22771be0 Binary files /dev/null and b/docs/images/knowledge-base/windsurf/step8.png differ diff --git a/docs/images/knowledge-base/windsurf/step9.png b/docs/images/knowledge-base/windsurf/step9.png new file mode 100644 index 00000000..c6e61096 Binary files /dev/null and b/docs/images/knowledge-base/windsurf/step9.png differ diff --git a/docs/images/knowledge-base/windsurf/windsurf.png b/docs/images/knowledge-base/windsurf/windsurf.png new file mode 100644 index 00000000..e73db29d Binary files /dev/null and b/docs/images/knowledge-base/windsurf/windsurf.png differ diff --git a/docs/images/knowledge-base/xero_oauth_app/xero_step1_login.png b/docs/images/knowledge-base/xero_oauth_app/xero_step1_login.png new file mode 100644 index 00000000..bd0b7aae Binary files /dev/null and b/docs/images/knowledge-base/xero_oauth_app/xero_step1_login.png differ diff --git a/docs/images/knowledge-base/xero_oauth_app/xero_step2_create_app.png b/docs/images/knowledge-base/xero_oauth_app/xero_step2_create_app.png new file mode 100644 index 00000000..44f8fe41 Binary files /dev/null and b/docs/images/knowledge-base/xero_oauth_app/xero_step2_create_app.png differ diff --git a/docs/images/knowledge-base/xero_oauth_app/xero_step3_connection_config.png b/docs/images/knowledge-base/xero_oauth_app/xero_step3_connection_config.png new file mode 100644 index 00000000..e88aab75 Binary files /dev/null and b/docs/images/knowledge-base/xero_oauth_app/xero_step3_connection_config.png differ diff --git a/docs/images/linear.svg b/docs/images/linear.svg new file mode 100644 index 00000000..6f3f028f --- /dev/null +++ b/docs/images/linear.svg @@ -0,0 +1,5 @@ + + + diff --git a/docs/images/linkedin.svg b/docs/images/linkedin.svg new file mode 100644 index 00000000..a7add3f3 --- /dev/null +++ b/docs/images/linkedin.svg @@ -0,0 +1 @@ + diff --git a/docs/images/logo/cover.png b/docs/images/logo/cover.png new file mode 100644 index 00000000..508b7bd8 Binary files /dev/null and b/docs/images/logo/cover.png differ diff --git a/docs/images/logo/favicon.png b/docs/images/logo/favicon.png new file mode 100644 index 00000000..6e01d0e9 Binary files /dev/null and b/docs/images/logo/favicon.png differ diff --git a/docs/images/logo/light.png b/docs/images/logo/light.png new file mode 100644 index 00000000..93351eb2 Binary files /dev/null and b/docs/images/logo/light.png differ diff --git a/docs/images/mcp-server.png b/docs/images/mcp-server.png new file mode 100644 index 00000000..3631fe32 Binary files /dev/null and b/docs/images/mcp-server.png differ diff --git a/docs/images/memo.webp b/docs/images/memo.webp new file mode 100644 index 00000000..ed5fa18b Binary files /dev/null and b/docs/images/memo.webp differ diff --git a/docs/images/metabase.svg b/docs/images/metabase.svg new file mode 100644 index 00000000..8fb1251b --- /dev/null +++ b/docs/images/metabase.svg @@ -0,0 +1,62 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/microsoft_teams.svg b/docs/images/microsoft_teams.svg new file mode 100644 index 00000000..3409e6cf --- /dev/null +++ b/docs/images/microsoft_teams.svg @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/images/mixpanel.svg b/docs/images/mixpanel.svg new file mode 100644 index 00000000..0a983dfc --- /dev/null +++ b/docs/images/mixpanel.svg @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/monday.svg b/docs/images/monday.svg new file mode 100644 index 00000000..795d7c7f --- /dev/null +++ b/docs/images/monday.svg @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/docs/images/monday.webp b/docs/images/monday.webp new file mode 100644 index 00000000..817976bf Binary files /dev/null and b/docs/images/monday.webp differ diff --git a/docs/images/moneybird.webp b/docs/images/moneybird.webp new file mode 100644 index 00000000..2445c361 Binary files /dev/null and b/docs/images/moneybird.webp differ diff --git a/docs/images/motion.svg b/docs/images/motion.svg new file mode 100644 index 00000000..a66c121a --- /dev/null +++ b/docs/images/motion.svg @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/n8n.svg b/docs/images/n8n.svg new file mode 100644 index 00000000..82f0a6da --- /dev/null +++ b/docs/images/n8n.svg @@ -0,0 +1 @@ +n8n \ No newline at end of file diff --git a/docs/images/notion.svg b/docs/images/notion.svg new file mode 100644 index 00000000..a4e62798 --- /dev/null +++ b/docs/images/notion.svg @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/oauth/white_label.png b/docs/images/oauth/white_label.png new file mode 100644 index 00000000..7504bbf8 Binary files /dev/null and b/docs/images/oauth/white_label.png differ diff --git a/docs/images/onedrive.svg b/docs/images/onedrive.svg new file mode 100644 index 00000000..f7d7a6a6 --- /dev/null +++ b/docs/images/onedrive.svg @@ -0,0 +1 @@ +OfficeCore10_32x_24x_20x_16x_01-22-2019 \ No newline at end of file diff --git a/docs/images/openrouter.webp b/docs/images/openrouter.webp new file mode 100644 index 00000000..f55bd7f9 Binary files /dev/null and b/docs/images/openrouter.webp differ diff --git a/docs/images/outlook.svg b/docs/images/outlook.svg new file mode 100644 index 00000000..59d2daf3 --- /dev/null +++ b/docs/images/outlook.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/pagerduty.svg b/docs/images/pagerduty.svg new file mode 100644 index 00000000..32d2d60c --- /dev/null +++ b/docs/images/pagerduty.svg @@ -0,0 +1,18 @@ + + + + + + + + + + + + diff --git a/docs/images/partnership/fireworks-klavis.png b/docs/images/partnership/fireworks-klavis.png new file mode 100644 index 00000000..9ae06ac2 Binary files /dev/null and b/docs/images/partnership/fireworks-klavis.png differ diff --git a/docs/images/partnership/togetherai-klavis.png b/docs/images/partnership/togetherai-klavis.png new file mode 100644 index 00000000..ce1b818a Binary files /dev/null and b/docs/images/partnership/togetherai-klavis.png differ diff --git a/docs/images/pdf.svg b/docs/images/pdf.svg new file mode 100644 index 00000000..696a4c3d --- /dev/null +++ b/docs/images/pdf.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/docs/images/perplexity.svg b/docs/images/perplexity.svg new file mode 100644 index 00000000..f87f6fc6 --- /dev/null +++ b/docs/images/perplexity.svg @@ -0,0 +1,16 @@ + + + + + + + + + diff --git a/docs/images/pipedrive.svg b/docs/images/pipedrive.svg new file mode 100644 index 00000000..689c4cd3 --- /dev/null +++ b/docs/images/pipedrive.svg @@ -0,0 +1,23 @@ + + + + Pipedrive_letter_logo_light@1,5x + + + Created with Sketch. + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/plai.webp b/docs/images/plai.webp new file mode 100644 index 00000000..3e9e19bd Binary files /dev/null and b/docs/images/plai.webp differ diff --git a/docs/images/postgre.svg b/docs/images/postgre.svg new file mode 100644 index 00000000..f290ec44 --- /dev/null +++ b/docs/images/postgre.svg @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/posthog.svg b/docs/images/posthog.svg new file mode 100644 index 00000000..410ad78a --- /dev/null +++ b/docs/images/posthog.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/quickbooks.svg b/docs/images/quickbooks.svg new file mode 100644 index 00000000..2b40d5b9 --- /dev/null +++ b/docs/images/quickbooks.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/resend.svg b/docs/images/resend.svg new file mode 100644 index 00000000..506dad83 --- /dev/null +++ b/docs/images/resend.svg @@ -0,0 +1,3 @@ + + + diff --git a/docs/images/salesforce.svg b/docs/images/salesforce.svg new file mode 100644 index 00000000..94e5b5a0 --- /dev/null +++ b/docs/images/salesforce.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/sdk/python.svg b/docs/images/sdk/python.svg new file mode 100644 index 00000000..269bbea5 --- /dev/null +++ b/docs/images/sdk/python.svg @@ -0,0 +1,265 @@ + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/images/sdk/typescript.svg b/docs/images/sdk/typescript.svg new file mode 100644 index 00000000..a46d53d4 --- /dev/null +++ b/docs/images/sdk/typescript.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/sendgrid.svg b/docs/images/sendgrid.svg new file mode 100644 index 00000000..e39de368 --- /dev/null +++ b/docs/images/sendgrid.svg @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/servicenow.png b/docs/images/servicenow.png new file mode 100644 index 00000000..cd1b1be6 Binary files /dev/null and b/docs/images/servicenow.png differ diff --git a/docs/images/shopify.svg b/docs/images/shopify.svg new file mode 100644 index 00000000..084e0943 --- /dev/null +++ b/docs/images/shopify.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/slack.svg b/docs/images/slack.svg new file mode 100644 index 00000000..7339449e --- /dev/null +++ b/docs/images/slack.svg @@ -0,0 +1 @@ + diff --git a/docs/images/stripe.svg b/docs/images/stripe.svg new file mode 100644 index 00000000..df61834d --- /dev/null +++ b/docs/images/stripe.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/docs/images/supabase.svg b/docs/images/supabase.svg new file mode 100644 index 00000000..cf8e0f29 --- /dev/null +++ b/docs/images/supabase.svg @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/docs/images/tavily.svg b/docs/images/tavily.svg new file mode 100644 index 00000000..ce155105 --- /dev/null +++ b/docs/images/tavily.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/unified_mcp.webp b/docs/images/unified_mcp.webp new file mode 100644 index 00000000..683ab025 Binary files /dev/null and b/docs/images/unified_mcp.webp differ diff --git a/docs/images/vercel.svg b/docs/images/vercel.svg new file mode 100644 index 00000000..72948d01 --- /dev/null +++ b/docs/images/vercel.svg @@ -0,0 +1,3 @@ + + + diff --git a/docs/images/vs_code.svg b/docs/images/vs_code.svg new file mode 100644 index 00000000..6cdccd87 --- /dev/null +++ b/docs/images/vs_code.svg @@ -0,0 +1,15 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/web_search.webp b/docs/images/web_search.webp new file mode 100644 index 00000000..a4f76245 Binary files /dev/null and b/docs/images/web_search.webp differ diff --git a/docs/images/whatsapp.webp b/docs/images/whatsapp.webp new file mode 100644 index 00000000..1b2b4476 Binary files /dev/null and b/docs/images/whatsapp.webp differ diff --git a/docs/images/white-labeling.png b/docs/images/white-labeling.png new file mode 100644 index 00000000..c3a4b66a Binary files /dev/null and b/docs/images/white-labeling.png differ diff --git a/docs/images/windsurf.svg b/docs/images/windsurf.svg new file mode 100644 index 00000000..4df8f33c --- /dev/null +++ b/docs/images/windsurf.svg @@ -0,0 +1 @@ +Windsurf \ No newline at end of file diff --git a/docs/images/wordpress.svg b/docs/images/wordpress.svg new file mode 100644 index 00000000..72183e90 --- /dev/null +++ b/docs/images/wordpress.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/youtube.svg b/docs/images/youtube.svg new file mode 100644 index 00000000..50b99cd3 --- /dev/null +++ b/docs/images/youtube.svg @@ -0,0 +1,25 @@ + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/images/zendesk.svg b/docs/images/zendesk.svg new file mode 100644 index 00000000..1d1de411 --- /dev/null +++ b/docs/images/zendesk.svg @@ -0,0 +1 @@ + diff --git a/docs/installation.mdx b/docs/installation.mdx new file mode 100644 index 00000000..19da9822 --- /dev/null +++ b/docs/installation.mdx @@ -0,0 +1,26 @@ +--- +title: Installation +description: Install Klavis SDK or use REST API directly +icon: wrench +--- + +## SDK Installation + + +```bash Python +pip install klavis +``` + +```bash TypeScript +npm install klavis +``` + + +## REST API + +```bash +curl -X POST "https://api.klavis.ai/mcp-server/instance/create" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{"serverName": "Gmail", "userId": "user123"}' +``` diff --git a/docs/introduction.mdx b/docs/introduction.mdx new file mode 100644 index 00000000..020cfad0 --- /dev/null +++ b/docs/introduction.mdx @@ -0,0 +1,40 @@ +--- +title: "Introduction" +description: "MCP Integration for your AI Application" +icon: handshake +mode: "wide" +--- + +# What is Klavis? + +**Klavis AI is open source MCP integration layers that let AI agents use any tool reliably at any scale. You can use our API to automate workflows across single or multiple apps with managed authentications.** + +Klavis AI Bridge +Klavis AI Bridge + +With Klavis API, you can equip your AI agent with any tools, without the complexity of managing authentication, handling security, or dealing with context overload. + +## Get started + + + + Connect any integration in minutes + + + Explore available MCP servers + + + Progressive tool discovery across apps + + + REST endpoints and schemas + + diff --git a/docs/knowledge-base/introduction.mdx b/docs/knowledge-base/introduction.mdx new file mode 100644 index 00000000..82d59782 --- /dev/null +++ b/docs/knowledge-base/introduction.mdx @@ -0,0 +1,123 @@ +--- +title: "Introduction" +description: "A collection of answers to frequently asked questions about MCP" +--- + +## Getting Started + + + + A beginner's guide to creating your first MCP server with Gmail as an example. + + + Learn how to use your Gmail MCP server with OpenAI. + + + +## Use MCP Server + + + + Learn how to integrate Klavis MCP Servers with ChatGPT for enhanced AI workflow experience. + + + Learn how to integrate Klavis MCP Servers with Claude Code for enhanced AI workflow experience. + + + Learn how to integrate Klavis MCP Servers with Claude Desktop for enhanced AI workflow experience. + + + Learn how to integrate Klavis MCP Servers with Cline for enhanced AI workflow experience. + + + Learn how to integrate Klavis MCP Servers with Continue for enhanced AI workflow experience. + + + Learn how to integrate Klavis MCP Servers with Cursor IDE for enhanced AI coding experience. + + + Learn how to integrate Klavis MCP Servers with Gemini CLI for enhanced AI coding experience. + + + Learn how to integrate Klavis MCP Servers with Kiro for enhanced AI workflow experience. + + + Learn how to integrate Klavis MCP Servers with n8n for enhanced AI coding experience. + + + Learn how to integrate Klavis MCP Servers with VS Code IDE for enhanced AI coding experience. + + + Learn how to integrate Klavis MCP Servers with Windsurf IDE for enhanced AI coding experience. + + + +## OAuth App Setup + + + Complete reference of OAuth scopes required for each supported application + + + Step-by-step guide to setting up Airtable OAuth application + + + Step-by-step guide to setting up Asana OAuth application + + + Step-by-step guide to setting up Calendly OAuth application + + + Step-by-step guide to setting up Canva OAuth application + + + Step-by-step guide to setting up Discord OAuth application + + + Step-by-step guide to setting up Dropbox OAuth application + + + Step-by-step guide to setting up Figma OAuth application + + + Step-by-step guide to setting up Gmail OAuth application + + + Step-by-step guide to setting up Google Calendar OAuth application + + + Step-by-step guide to setting up Google Docs OAuth application + + + Step-by-step guide to setting up Google Drive OAuth application + + + Step-by-step guide to setting up Google Sheets OAuth application + + + Step-by-step guide to setting up HubSpot OAuth application + + + Step-by-step guide to setting up LinkedIn OAuth application + + + Step-by-step guide to setting up Monday.com OAuth application + + + Step-by-step guide to setting up Moneybird OAuth application + + + Step-by-step guide to setting up OneDrive OAuth application + + + Step-by-step guide to setting up QuickBooks OAuth application + + + Step-by-step guide to setting up Salesforce OAuth application + + + Step-by-step guide to setting up Slack OAuth application + + + Step-by-step guide to setting up Xero OAuth application + + \ No newline at end of file diff --git a/docs/knowledge-base/llm-based-development.mdx b/docs/knowledge-base/llm-based-development.mdx new file mode 100644 index 00000000..41f02e25 --- /dev/null +++ b/docs/knowledge-base/llm-based-development.mdx @@ -0,0 +1,25 @@ +--- +title: "LLM-based Development" +description: "Enhance your AI coding experience with LLM readable documentation." +--- + +## Overview + +Welcome to the guide on utilizing our specialized `llms.txt` files to enhance your LLM-based coding workflow. Whether you’re using Replit, Cursor, Claude Code, or any other AI coding tool, this page will assist you in leveraging our resources to debug and refine code generated by Klavis. + + +## Option 1: Copy-paste llms.txt + +Copy the [llms.txt](https://raw.githubusercontent.com/Klavis-AI/klavis/refs/heads/main/LLM.md) and paste it in the prompt for your coding assistant. + + + +## Option 2: Install the MCP server + +Open a terminal and run the following command to install the MCP server locally: + +```bash +npx mint-mcp add klavisai +``` + +Everything will be set up automatically! diff --git a/docs/knowledge-base/oauth_app/airtable.mdx b/docs/knowledge-base/oauth_app/airtable.mdx new file mode 100644 index 00000000..2f05df52 --- /dev/null +++ b/docs/knowledge-base/oauth_app/airtable.mdx @@ -0,0 +1,114 @@ +--- +title: "Setting Up Airtable OAuth App" +description: "Complete guide to creating and configuring a Airtable OAuth application" +--- + +## Prerequisites + +- Airtable Account + +## Step 1: Create Airtable OAuth Integration + +1. Visit [https://airtable.com/create/oauth](https://airtable.com/create/oauth) +2. Click **"Register new OAuth Integration"** + +OAuth Integration Creation + +## Step 2: Register an Integration + +1. Enter **"Name"** +2. Add redirect URLs: `https://api.klavis.ai/oauth/airtable/callback` +3. Click **"Register Integration"** + +Here is an example of Klavis AI OAuth app configuration: + +OAuth Settings Configuration + +Once registered, Airtable will generate your Client ID and Client Secret. You’ll need these for connecting Klavis AI. + +## Step 3: Request Scopes + + +Klavis Airtable MCP Server uses the following OAuth scopes: `data.records:read, data.records:write, data.recordComments:read, data.recordComments:write, schema.bases:read, schema.bases:write, user.email:read` + + +1. After registration, add required scopes. + +Product Access and Credentials + +2. Enter Required Info, Then click **"Save Changes"** + + +You have successfully created an Airtable OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 4: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Airtable OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Airtable **Client ID** and **Client Secret** from Step 3 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/airtable/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Airtable OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/airtable/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Airtable OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Airtable, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Airtable OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.AIRTABLE, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Airtable OAuth Documentation](https://airtable.com/developers/web/guides/oauth-integrations) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/asana.mdx b/docs/knowledge-base/oauth_app/asana.mdx new file mode 100644 index 00000000..3e9764a4 --- /dev/null +++ b/docs/knowledge-base/oauth_app/asana.mdx @@ -0,0 +1,125 @@ +--- +title: "Setting Up Asana OAuth App" +description: "Complete guide to creating and configuring a Asana OAuth application" +--- + +## Prerequisites + +- Asana account (personal or business) +- Access to Asana Developer Portal + +## Step 1: Create Asana Developer Account + +1. Visit [https://asana.com/developers/](https://asana.com/developers) +2. Click **"Get started"** or **"Login"** if you already have an account +3. Sign in with your Asana account or create a new developer account + +Asana Developer Login + +## Step 2: Create a New App + +1. Once logged in, go to your settings -> **Apps**, and click **"Build new apps"** +2. Click **"Create new app"** under **"My apps"** section +3. Fill in the app details: + - **App name**: Your application name (e.g., your brand name) + - **Which best describes what your app will do?**: (Choose your preference) + - **Asana API Terms**: Agree to the terms and conditions + +Asana App Creation Form + +## Step 3: Configure OAuth Settings + +1. After creating the app, you'll see: +- **Client ID**: Copy this value +- **Client Secret**: Copy this value (keep it secure!) + +2. **Redirect URIs**: Add your callback URL: + - `https://api.klavis.ai/oauth/asana/callback` + +OAuth Settings Configuration + +## Step 4: Request Scopes + + +Klavis Asana MCP Server uses the following OAuth scopes: `goals:read,project_templates:read,projects:read,projects:write,projects:delete,stories:read,task_templates:read,tasks:read,tasks:write,tasks:delete,teams:read,users:read,workspaces:read,workspaces.typeahead:read` + + +1. Scroll to **"Permission scopes"** tab from **"Oauth"** menu +2. **Scopes**: Select the scopes your application needs: + +Request Scopes + + +You have successfully created an Asana OAuth application! You now have your Client ID, Client Secret, and Redirect Url ready for integration with Klavis AI. + + +## (Optional) Step 5: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Asana OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Asana **Client ID** and **Client Secret** from Step 3 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/asana/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Asana OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/asana/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Asana OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Asana, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Asana OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.ASANA, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Asana OAuth Documentation](https://developers.asana.com/docs/oauth) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) diff --git a/docs/knowledge-base/oauth_app/calendly.mdx b/docs/knowledge-base/oauth_app/calendly.mdx new file mode 100644 index 00000000..2bd85529 --- /dev/null +++ b/docs/knowledge-base/oauth_app/calendly.mdx @@ -0,0 +1,120 @@ +--- +title: "Setting Up Calendly OAuth App" +description: "Complete guide to creating and configuring a Calendly OAuth application" +--- + +## Prerequisites + +- Calendly account (personal or business) +- Access to Calendly Developer Portal + +## Step 1: Create Calendly Developer Account + +1. Visit [https://developer.calendly.com/](https://developer.calendly.com/) +2. Click **"Sign In"** or **"Log In"** if you already have an account +3. Sign in with your Calendly account or create a new developer account + +Calendly Developer Login + +## Step 2: Create a New App + +1. Once logged in, go to **"Accounts"** -> **"My Apps"** +2. Click **"Create a new app"** +3. Choose **"Web App"** as the integration type +4. Fill in the app details: + - **Name of app**: Your application name (e.g., your brand name) + - **Kind of app**: Web/Native (depending on your need) + - **Environment Type**: Sandbox/Production (**Production** recommended) + - **Redirect URIs**: Add your callback URL: + - `https://api.klavis.ai/oauth/calendly/callback` + +Calendly App Creation Form + + +Normally, the redirect URI should be set to: `https://api.klavis.ai/oauth/calendly/callback` + + +## Step 3: Get Your Credentials + +After creating the app, you'll see: +- **Client ID**: Copy this value +- **Client Secret**: Generate and copy this value (keep it secure!) + +Get the Credentials + + +Calendly does not use traditional OAuth scopes. Once authenticated, your application has access to all API endpoints permitted by the user's subscription and role. + + +## (Optional) Step 4: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Calendly OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Calendly **Client ID** and **Client Secret** from Step 3 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/calendly/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Calendly OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/calendly/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Calendly OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Calendly, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Calendly OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.CALENDLY, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Calendly Developer Documentation](https://developer.calendly.com/getting-started) +- [Calendly OAuth 2.0 Authentication Guide](https://developer.calendly.com/create-a-developer-account) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) +- [Calendly API Reference](https://developer.calendly.com/getting-started#access-requirements) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/canva.mdx b/docs/knowledge-base/oauth_app/canva.mdx new file mode 100644 index 00000000..8340c6de --- /dev/null +++ b/docs/knowledge-base/oauth_app/canva.mdx @@ -0,0 +1,150 @@ +--- +title: "Setting Up Canva OAuth App" +description: "Complete guide to creating and configuring a Canva OAuth application" +--- + +## Prerequisites + +- Canva account +- Multi-Factor Authentication (MFA) enabled on your Canva account + + +Make sure your account has a password. Set it up by going to Settings > Login > Password. + + +**Without password configured:** +Canva Settings Without Password + +**With password configured (showing MFA option):** +Canva Settings With Password and MFA + +## Step 1: Create Canva Developer Account & Integration + +1. Visit [https://developer.canva.com/](https://developer.canva.com/) and sign in with your Canva credentials +2. Navigate to **"Your integrations"** page +3. Click **"Create an integration"** and fill out the form: + - **Integration type**: Choose between "Public" (available to all users after review) or "Private" (team only) + - **Integration name**: Choose a descriptive name + - Accept Canva's Developer Terms + +## Step 2: Configure Integration Settings + +1. Under **"Configuration"** → **"Configure your integration"**, set the following: + - **Integration name**: Add your application name + - **Client ID**: Make note of this value for later use + - **Generate secret**: Click to generate and securely save your Client Secret + +Integration Configuration Settings + +## Step 3: Set Required Scopes + + +Klavis Canva MCP Server uses the following OAuth scopes: `app:read app:write asset:read asset:write brandtemplate:content:read brandtemplate:meta:read comment:read comment:write design:content:read design:content:write design:meta:read design:permission:read design:permission:write folder:read folder:write folder:permission:read folder:permission:write profile:read` + + +1. Under **"Scopes"** → **"Set the scopes"**, configure the required permissions as shown in the screenshot below: + +OAuth Scopes Configuration + +## Step 4: Configure Authentication & Redirect URLs + +1. Under **"Authentication"** → **"Add Authentication"**, add redirect URL: + - `https://api.klavis.ai/oauth/canva/callback` + +Authentication and Redirect URL Configuration + +## Step 5: Submit for Review (Public Integrations Only) + + +If you created a "Private" integration, you can skip this step. Private integrations are immediately available to your team. + + +For public integrations: +1. Complete all required configuration sections +2. Click **"Submit for Review"** +3. Wait for Canva's approval process + +Submit Integration for Review + +## Step 6: Integration Review Status + +Once submitted, your integration will show "In Review" status. You'll receive email notifications about the review progress, and you will be asked to complete a questionnaire in the Jira ticket from the email. + +Integration Review Email Questionnaire + + +You have successfully created a Canva OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 7: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Canva OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Canva **Client ID** and **Client Secret** from Step 2 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/canva/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Canva OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/canva/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Canva OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Canva, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Canva OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.CANVA, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Canva Connect API Documentation](https://www.canva.dev/docs/connect/) +- [Canva OAuth 2.0 Authentication Guide](https://www.canva.dev/docs/connect/authentication/) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) +- [Canva Setting up Multi-Factor Authentication](https://www.canva.com/help/login-verification/) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/discord.mdx b/docs/knowledge-base/oauth_app/discord.mdx new file mode 100644 index 00000000..be617580 --- /dev/null +++ b/docs/knowledge-base/oauth_app/discord.mdx @@ -0,0 +1,160 @@ +--- +title: "Setting Up Discord OAuth App" +description: "Complete guide to creating and configuring a Discord OAuth application" +--- + +## Prerequisites + +- Discord account +- Access to Discord Developer Portal + +## Step 1: Create Discord Developer Account + +1. Visit [https://discord.com/developers](https://discord.com/developers) +2. Click **"Get Started"** or **"Login"** if you already have an account +3. Sign in with your Discord account or create a new developer account + +Discord Developer Login + +## Step 2: Create a New App + +1. Once logged in, go to **"Applications"** in your developer dashboard +2. Click **"New Application"** to create a new app + +Create New Application + +3. Click on the **"App"** that you just created +4. Go to **"General Information"** section +5. Fill in the app details: + - **App name**: Your application name (e.g., your brand name) + - **App description**: Your application description + - **"App Icon"**: Upload 100x100px PNG (recommended) + - **"Tags"**: Add tags that describe your application + - **Privacy policy URL**: Your privacy policy URL + - **Terms of service URL**: Your terms of service URL + +General App Information + +## Step 3: Build A Bot + +1. Go to **"Bot"** section. +2. Get **Discord Token**: + - Click on **Reset Token** button under **TOKEN** tab + - Enter your **Discord Password** and press **Submit** + - New **Token** has been generated + - **Copy** this token (keep it secure!) + +Generate Discord Token + +3. Set **Bot Permissions**: + - Choose **Administrator** under **General Permissions** (recommended) + +Setting Bot Permissions + + +Klavis AI recommends to select required OAuth scopes only. + + +## Step 4: Configure OAuth Settings + +Navigate to **"OAuth2"**: +- **Client ID**: Copy this value +- **Client Secret**: Generate and copy this value (keep it secure!) +- **Redirect URI**: Set as **https://api.klavis.ai/oauth/discord/callback** + +Configure OAuth Settings + + +Normally, the redirect URI should be set to: `https://api.klavis.ai/oauth/discord/callback` + + +## Step 5: OAuth2 URL Generator + +1. Scroll to **Scopes** + - Select **bot** from the scopes list + - For **Bot Permissions**: select **Administrator** + +OAuth2 URL Generator + +2. Generate URL: + - Integration Type: **Guild Install** (recommended) + - Copy **Generated URL** and paste in browser to complete authentication + +Generate OAuth2 URL and paste in browser + + +You have successfully created a Discord OAuth application! You now have your Client ID and Client Secret, and Discord Token ready for integration with Klavis AI. + + +## (Optional) Step 6: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Discord OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Discord **Client ID** and **Client Secret** from Step 4 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/discord/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Discord OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/discord/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Discord OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Discord, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Discord OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.Discord, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Discord Developer Documentation](https://discord.com/developers/docs/intro/) +- [Discord OAuth 2.0 Authentication Guide](https://discord.com/developers/docs/topics/oauth2) +- [Discord API Scopes Reference](https://discord.com/developers/docs/topics/oauth2#shared-resources-oauth2-scopes) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/dropbox.mdx b/docs/knowledge-base/oauth_app/dropbox.mdx new file mode 100644 index 00000000..7e98068b --- /dev/null +++ b/docs/knowledge-base/oauth_app/dropbox.mdx @@ -0,0 +1,128 @@ +--- +title: "Setting Up Dropbox OAuth App" +description: "Complete guide to creating and configuring a Dropbox OAuth application" +--- + +## Prerequisites + +- Dropbox account (personal or business) +- Access to Dropbox Developer Console + +## Step 1: Create Dropbox Developer Account & App + +1. Visit [https://www.dropbox.com/developers](https://www.dropbox.com/developers) and sign in with your Dropbox credentials +2. Click **"Create apps"** and fill out the form: + - **Choose an API**: Select **"Scoped access"** + - **Choose the type of access**: Select **"Full Dropbox"** for complete access or **"App folder"** for restricted access + - **Name your app**: Choose a descriptive name for your application + - **Choose the Dropbox account**: Select your personal or business account + +Dropbox App Creation Form + +## Step 2: Configure OAuth Settings + +1. After creating the app, you'll be redirected to the app settings page +2. In **"OAuth 2"** section, add redirect URLs: + - `https://api.klavis.ai/oauth/dropbox/callback` +3. Note your **App key** and **App secret** + +OAuth Settings Configuration + +## Step 3: Set Required Permissions + + +Klavis Dropbox MCP Server uses the following OAuth scopes: `account_info.read files.metadata.read files.metadata.write files.content.read files.content.write file_requests.read file_requests.write sharing.read sharing.write contacts.read contacts.write` + + +1. Go to the **"Permissions"** tab and configure the required permissions as shown in the screenshot below: +Permissions Configuration + + +2. Click **"Submit"** to save your permission settings + + +You have successfully created a Dropbox OAuth application! You now have your App Key and App Secret ready for integration with Klavis AI. + + + +**Klavis handles all token management automatically** - we securely store and manage your tokens so you maintain seamless access to your Dropbox data without any interruption. + + +## (Local Testing) Step 4: Generate Access Token + +1. In the **"Settings"** tab, scroll to the **"OAuth 2"** section +2. Click **"Generate"** under **"Generated access token"** +3. Copy and securely store the generated token + +## (Optional) Step 5: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Dropbox OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Dropbox **App Key** and **App Secret** from Step 2 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/dropbox/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Dropbox OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/dropbox/authorize?instance_id=${instanceId}&client_id=${yourAppKey}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Dropbox OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Dropbox, + instanceId: instanceId, + clientId: yourAppKey, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Dropbox OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.DROPBOX, + instance_id=instance_id, + client_id=your_app_key, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Dropbox OAuth Guide](https://developers.dropbox.com/oauth-guide) +- [Dropbox OAuth 2.0 Documentation](https://www.dropbox.com/developers/http/documentation#oauth2-authorize) +- [Dropbox Developer Apps Console](https://www.dropbox.com/developers/apps) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/figma.mdx b/docs/knowledge-base/oauth_app/figma.mdx new file mode 100644 index 00000000..28988e68 --- /dev/null +++ b/docs/knowledge-base/oauth_app/figma.mdx @@ -0,0 +1,119 @@ +--- +title: "Setting Up Figma OAuth App" +description: "Complete guide to creating and configuring a Figma OAuth application" +--- + +## Prerequisites + +- Figma account (personal or business) +- Access to Figma Developer Portal + +## Step 1: Create Figma Developer Account + +1. Visit [https://www.figma.com/developers/](https://www.figma.com/developers/) +2. Click **"Sign Up"** or **"Login"** if you already have an account +3. Sign in with your Figma account or create a new developer account + +Figma Developer Login + +## Step 2: Create a New App + +1. Once logged in, go to your developer dashboard -> **"My Apps"** +2. Click **"Create a new app"** +3. Fill in the app details: + - **App name**: Your application name (e.g., your brand name) + - **Website**: Your company website + - **App logo**: Upload 100x100px PNG (recommended) + +Figma App Creation Form + +## Step 3: Get Your Credentials + + +Klavis Figma MCP Server uses the following OAuth scopes: `files:read,file_comments:write,openid,email,profile` + + +After creating the app, navigate to **OAuth 2.0** tab +- **Client ID**: Copy this value +- **Client Secret**: Generate and copy this value (keep it secure!) +- **Redirect URIs**: Add your callback URL: + - `https://api.klavis.ai/oauth/figma/callback` + +OAuth Settings and Getting the Credentials + + +You have successfully created a Figma OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 4: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Figma OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Figma **Client ID** and **Client Secret** from Step 3 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/figma/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Figma OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/figma/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Figma OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Figma, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Figma OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.FIGMA, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Figma Developer Documentation](https://www.figma.com/developers/api) +- [Figma OAuth 2.0 Authentication Guide](https://www.figma.com/developers/api#authentication) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) +- [Figma API Scopes Reference](https://www.figma.com/developers/api#authentication-scopes) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/gmail.mdx b/docs/knowledge-base/oauth_app/gmail.mdx new file mode 100644 index 00000000..49c7bb31 --- /dev/null +++ b/docs/knowledge-base/oauth_app/gmail.mdx @@ -0,0 +1,170 @@ +--- +title: "Setting Up Gmail OAuth App" +description: "Complete guide to creating and configuring a Gmail OAuth application" +--- + +## Prerequisites + +- Google account + +## Step 1: Create a Project + +1. Visit [https://console.cloud.google.com/home/dashboard](https://console.cloud.google.com/home/dashboard) and select the project dropdown at the top. + +Google Cloud project dropdown + +2. Click **New Project**. + +Google Cloud new project button + +3. Enter the project details and click **Create**. + +Google Cloud create project form + +4. Select your newly created project. + +Google Cloud select project + +5. Go to **APIs & Services**. + +Google Cloud APIs & Services menu + +6. Click **Enable APIs and Services**. + +Enable APIs and Services button + +7. Search for and select **Gmail API**. + +Search Gmail API + +8. Click **Enable**. + +Enable Gmail API + +9. Go back to **APIs & Services** and select **OAuth consent screen**. + +OAuth consent screen menu + +10. Enter the required information and save. + +OAuth consent screen form + +11. Go to **Credentials** in APIs & Services and click **Create Credentials**. + +Create credentials button + +12. Select **OAuth Client ID**. + +OAuth Client ID selection + +13. Choose **Web Application** and enter the required details. + +Web application OAuth settings + +14. Add redirect URLs: + `https://api.klavis.ai/oauth/gmail/` + Then click **Create**. + +Add redirect URIs + +15. Your **Client ID** and **Client Secret** will be displayed. + +OAuth credentials screen + + +## Step 2: Request Scopes + + +Klavis Gmail MCP Server uses the following OAuth scopes: +`https://www.googleapis.com/auth/gmail.readonly` +`https://www.googleapis.com/auth/gmail.send` +`https://www.googleapis.com/auth/gmail.compose` +`https://www.googleapis.com/auth/gmail.modify` + + +1. Go to **OAuth consent screen**. + +OAuth consent screen + +2. Click **Data Access**. + +Data access settings + +3. Click **Add or Remove Scopes**. + +Add or remove scopes + +4. Enter the needed scopes and click **Update**. + + +You have successfully created a Gmail OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 3: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Gmail OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Gmail **Client ID** and **Client Secret** from Step 1. +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/gmail/callback` or your custom callback URL. +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Gmail OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/gmail/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Gmail OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Gmail, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Gmail OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.GMAIL, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/google_calendar.mdx b/docs/knowledge-base/oauth_app/google_calendar.mdx new file mode 100644 index 00000000..80108046 --- /dev/null +++ b/docs/knowledge-base/oauth_app/google_calendar.mdx @@ -0,0 +1,168 @@ +--- +title: "Setting Up Google Calendar OAuth App" +description: "Complete guide to creating and configuring a Google Calendar OAuth application" +--- + +## Prerequisites + +- Google account + +## Step 1: Create a Project + +1. Visit [https://console.cloud.google.com/home/dashboard](https://console.cloud.google.com/home/dashboard) and select the project dropdown at the top. + +Google Cloud project dropdown + +2. Click **New Project**. + +Google Cloud new project button + +3. Enter the project details and click **Create**. + +Google Cloud create project form + +4. Select your newly created project. + +Google Cloud select project + +5. Go to **APIs & Services**. + +Google Cloud APIs & Services menu + +6. Click **Enable APIs and Services**. + +Enable APIs and Services button + +7. Search for and select **Google Calendar API**. + +Search Google Calendar API + +8. Click **Enable**. + +Enable Google Calendar API + +9. Go back to **APIs & Services** and select **OAuth consent screen**. + +OAuth consent screen menu + +10. Enter the required information and save. + +OAuth consent screen form + +11. Go to **Credentials** in APIs & Services and click **Create Credentials**. + +Create credentials button + +12. Select **OAuth Client ID**. + +OAuth Client ID selection + +13. Choose **Web Application** and enter the required details. + +Web application OAuth settings + +14. Add redirect URLs: + `https://api.klavis.ai/oauth/gcalendar/callback` + Then click **Create**. + +Add redirect URIs + +15. Your **Client ID** and **Client Secret** will be displayed. + +OAuth credentials screen + + +## Step 2: Request Scopes + + +Klavis Google Calendar MCP Server uses the following OAuth scopes: +`https://www.googleapis.com/auth/calendar.readonly` +`https://www.googleapis.com/auth/calendar.events` + + +1. Go to **OAuth consent screen**. + +OAuth consent screen + +2. Click **Data Access**. + +Data access settings + +3. Click **Add or Remove Scopes**. + +Add or remove scopes + +4. Enter the needed scopes and click **Update**. + + +You have successfully created a Google Calendar OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 3: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Google Calendar OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Google Calendar **Client ID** and **Client Secret** from Step 1. +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/gcalendar/callback` or your custom callback URL. +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Google Calendar OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/gcalendar/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Google Calendar OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Gcalendar, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Google Calendar OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.GCALENDAR, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/google_docs.mdx b/docs/knowledge-base/oauth_app/google_docs.mdx new file mode 100644 index 00000000..e45e6ea8 --- /dev/null +++ b/docs/knowledge-base/oauth_app/google_docs.mdx @@ -0,0 +1,167 @@ +--- +title: "Setting Up Google Docs OAuth App" +description: "Complete guide to creating and configuring a Google Docs OAuth application" +--- + +## Prerequisites + +- Google account + +## Step 1: Create a Project + +1. Visit [https://console.cloud.google.com/home/dashboard](https://console.cloud.google.com/home/dashboard) and select the project dropdown at the top. + +Google Cloud project dropdown + +2. Click **New Project**. + +Google Cloud new project button + +3. Enter the project details and click **Create**. + +Google Cloud create project form + +4. Select your newly created project. + +Google Cloud select project + +5. Go to **APIs & Services**. + +Google Cloud APIs & Services menu + +6. Click **Enable APIs and Services**. + +Enable APIs and Services button + +7. Search for and select **Google Docs API**. + +Search Google Docs API + +8. Click **Enable**. + +Enable Google Docs API + +9. Go back to **APIs & Services** and select **OAuth consent screen**. + +OAuth consent screen menu + +10. Enter the required information and save. + +OAuth consent screen form + +11. Go to **Credentials** in APIs & Services and click **Create Credentials**. + +Create credentials button + +12. Select **OAuth Client ID**. + +OAuth Client ID selection + +13. Choose **Web Application** and enter the required details. + +Web application OAuth settings + +14. Add redirect URLs: + `https://api.klavis.ai/oauth/gdocs/callback` + Then click **Create**. + +Add redirect URIs + +15. Your **Client ID** and **Client Secret** will be displayed. + +OAuth credentials screen + + +## Step 2: Request Scopes + + +Klavis Google Docs MCP Server uses the following OAuth scopes: +`https://www.googleapis.com/auth/drive` + + +1. Go to **OAuth consent screen**. + +OAuth consent screen + +2. Click **Data Access**. + +Data access settings + +3. Click **Add or Remove Scopes**. + +Add or remove scopes + +4. Enter the needed scopes and click **Update**. + + +You have successfully created a Google Docs OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 3: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Google Docs OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Google Docs **Client ID** and **Client Secret** from Step 1. +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/gdocs/callback` or your custom callback URL. +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Google Docs OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/gdocs/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Google Docs OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Gdocs, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Google Docs OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.GDOCS, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/google_drive.mdx b/docs/knowledge-base/oauth_app/google_drive.mdx new file mode 100644 index 00000000..0a4ad622 --- /dev/null +++ b/docs/knowledge-base/oauth_app/google_drive.mdx @@ -0,0 +1,167 @@ +--- +title: "Setting Up Google Drive OAuth App" +description: "Complete guide to creating and configuring a Google Drive OAuth application" +--- + +## Prerequisites + +- Google account + +## Step 1: Create a Project + +1. Visit [https://console.cloud.google.com/home/dashboard](https://console.cloud.google.com/home/dashboard) and select the project dropdown at the top. + +Google Cloud project dropdown + +2. Click **New Project**. + +Google Cloud new project button + +3. Enter the project details and click **Create**. + +Google Cloud create project form + +4. Select your newly created project. + +Google Cloud select project + +5. Go to **APIs & Services**. + +Google Cloud APIs & Services menu + +6. Click **Enable APIs and Services**. + +Enable APIs and Services button + +7. Search for and select **Google Drive API**. + +Search Google Drive API + +8. Click **Enable**. + +Enable Google Drive API + +9. Go back to **APIs & Services** and select **OAuth consent screen**. + +OAuth consent screen menu + +10. Enter the required information and save. + +OAuth consent screen form + +11. Go to **Credentials** in APIs & Services and click **Create Credentials**. + +Create credentials button + +12. Select **OAuth Client ID**. + +OAuth Client ID selection + +13. Choose **Web Application** and enter the required details. + +Web application OAuth settings + +14. Add redirect URLs: + `https://api.klavis.ai/oauth/gdrive/callback` + Then click **Create**. + +Add redirect URIs + +15. Your **Client ID** and **Client Secret** will be displayed. + +OAuth credentials screen + + +## Step 2: Request Scopes + + +Klavis Google Drive MCP Server uses the following OAuth scopes: +`https://www.googleapis.com/auth/drive` + + +1. Go to **OAuth consent screen**. + +OAuth consent screen + +2. Click **Data Access**. + +Data access settings + +3. Click **Add or Remove Scopes**. + +Add or remove scopes + +4. Enter the needed scopes and click **Update**. + + +You have successfully created a Google Drive OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 3: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Google Drive OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Google Drive **Client ID** and **Client Secret** from Step 1. +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/gdrive/callback` or your custom callback URL. +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Google Drive OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/gdrive/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Google Drive OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Gdrive, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Google Drive OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.GDRIVE, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/google_sheets.mdx b/docs/knowledge-base/oauth_app/google_sheets.mdx new file mode 100644 index 00000000..f7cb8011 --- /dev/null +++ b/docs/knowledge-base/oauth_app/google_sheets.mdx @@ -0,0 +1,167 @@ +--- +title: "Setting Up Google Sheets OAuth App" +description: "Complete guide to creating and configuring a Google Sheets OAuth application" +--- + +## Prerequisites + +- Google account + +## Step 1: Create a Project + +1. Visit [https://console.cloud.google.com/home/dashboard](https://console.cloud.google.com/home/dashboard) and select the project dropdown at the top. + +Google Cloud project dropdown + +2. Click **New Project**. + +Google Cloud new project button + +3. Enter the project details and click **Create**. + +Google Cloud create project form + +4. Select your newly created project. + +Google Cloud select project + +5. Go to **APIs & Services**. + +Google Cloud APIs & Services menu + +6. Click **Enable APIs and Services**. + +Enable APIs and Services button + +7. Search for and select **Google Sheets API**. + +Search Google Sheets API + +8. Click **Enable**. + +Enable Google Sheets API + +9. Go back to **APIs & Services** and select **OAuth consent screen**. + +OAuth consent screen menu + +10. Enter the required information and save. + +OAuth consent screen form + +11. Go to **Credentials** in APIs & Services and click **Create Credentials**. + +Create credentials button + +12. Select **OAuth Client ID**. + +OAuth Client ID selection + +13. Choose **Web Application** and enter the required details. + +Web application OAuth settings + +14. Add redirect URLs: + `https://api.klavis.ai/oauth/gsheets/callback` + Then click **Create**. + +Add redirect URIs + +15. Your **Client ID** and **Client Secret** will be displayed. + +OAuth credentials screen + + +## Step 2: Request Scopes + + +Klavis Google Sheets MCP Server uses the following OAuth scopes: +`https://www.googleapis.com/auth/drive` + + +1. Go to **OAuth consent screen**. + +OAuth consent screen + +2. Click **Data Access**. + +Data access settings + +3. Click **Add or Remove Scopes**. + +Add or remove scopes + +4. Enter the needed scopes and click **Update**. + + +You have successfully created a Google Sheets OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 3: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Google Sheets OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Google Sheets **Client ID** and **Client Secret** from Step 1. +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/gsheets/callback` or your custom callback URL. +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Google Sheets OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/gsheets/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Google Sheets OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Gsheets, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Google Sheets OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.GSHEETS, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/hubspot.mdx b/docs/knowledge-base/oauth_app/hubspot.mdx new file mode 100644 index 00000000..44c92abd --- /dev/null +++ b/docs/knowledge-base/oauth_app/hubspot.mdx @@ -0,0 +1,120 @@ +--- +title: "Setting Up HubSpot OAuth App" +description: "Complete guide to creating and configuring a HubSpot OAuth application" +--- + +## Prerequisites + +- HubSpot developers account + +## Step 1: Create HubSpot Developer Account & App + +1. Visit [https://developers.hubspot.com/](https://developers.hubspot.com/) and sign in +2. Click **"Create App"** and fill out the form: + - **Public app name**: Choose a descriptive name + - **App logo**: Upload 100x100px PNG (recommended) + - **Description**: Associate with your company + +HubSpot App Form + +## Step 2: Configure OAuth Settings + +1. Go to the **"Auth"** tab in your application dashboard +2. Add redirect URLs: `https://api.klavis.ai/oauth/hubspot/callback` + +Here is an example of Klavis AI OAuth app configuration: + +OAuth Settings Configuration + +## Step 3: Request Scopes + + +Klavis Hubspot MCP Server uses the following OAuth scopes: `account-info.security.read, accounting, cms.domains.read, cms.domains.write, crm.export, crm.import, crm.lists.read, crm.lists.write, crm.objects.companies.read, crm.objects.companies.write, crm.objects.contacts.read, crm.objects.contacts.write, crm.objects.deals.read, crm.objects.deals.write, crm.objects.marketing_events.read, crm.objects.marketing_events.write, crm.objects.owners.read, crm.objects.quotes.read, crm.objects.quotes.write, crm.schemas.companies.read, crm.schemas.companies.write, crm.schemas.contacts.read, crm.schemas.contacts.write, oauth, settings.users.read, settings.users.write, tickets, timeline, e-commerce, crm.objects.custom.read, crm.schemas.custom.read, content, sales-email-read` + + +1. Go to the **"Auth"** tab in your application dashboard +2. Scroll down to **"Scopes"**. +3. Click **"Add New Scope"** and add required scopes. + +Here is an example of Klavis AI OAuth app configuration: + +Scope Addition + +## Step 4: Create App + +1. Now Click the **"Create App"** Button + + +You have successfully created a HubSpot OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 5: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own HubSpot OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your HubSpot **Client ID** and **Client Secret** +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/hubspot/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating HubSpot OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/hubspot/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating HubSpot OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Hubspot, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating HubSpot OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.HUBSPOT, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [HubSpot OAuth Documentation](https://developers.hubspot.com/docs/guides/apps/authentication/working-with-oauth) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/linkedin.mdx b/docs/knowledge-base/oauth_app/linkedin.mdx new file mode 100644 index 00000000..69c97786 --- /dev/null +++ b/docs/knowledge-base/oauth_app/linkedin.mdx @@ -0,0 +1,117 @@ +--- +title: "Setting Up LinkedIn OAuth App" +description: "Complete guide to creating and configuring a LinkedIn OAuth application" +--- + +## Prerequisites + +- LinkedIn personal account +- LinkedIn company page (required for app creation with admin access) + +## Step 1: Create LinkedIn Developer Account & App + +1. Visit [https://developer.linkedin.com/](https://developer.linkedin.com/) and sign in +2. Click **"Create App"** and fill out the form: + - **App name**: Choose a descriptive name + - **LinkedIn Page**: Associate with your company page + - **App logo**: Upload 100x100px PNG (recommended) + - Accept LinkedIn's API Terms of Use + +LinkedIn App Creation Form + +## Step 2: Configure OAuth Settings + +1. Go to the **"Auth"** tab in your application dashboard +2. Add redirect URLs: `https://api.klavis.ai/oauth/linkedin/callback` + +Here is an example of Klavis AI OAuth app configuration: + +OAuth Settings Configuration + +## Step 3: Request Scopes + + +Klavis LinkedIn MCP Server uses the following OAuth scopes: `openid,profile,email,w_member_social` + + +1. Go to **"Products"** tab and request **"Shared on LinkedIn"** and **"Sign In with LinkedIn using OpenID Connect"** +2. Once approved, you can see **Client ID** and **Client Secret** from the **"Auth"** tab + +Here is an example of Klavis AI OAuth app configuration: + +Product Access and Credentials + + +You have successfully created a LinkedIn OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 4: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own LinkedIn OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your LinkedIn **Client ID** and **Client Secret** from Step 3 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/linkedin/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating LinkedIn OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/linkedin/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating LinkedIn OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Linkedin, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating LinkedIn OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.LINKEDIN, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [LinkedIn OAuth Documentation](https://docs.microsoft.com/en-us/linkedin/shared/authentication/authorization-code-flow) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/monday.mdx b/docs/knowledge-base/oauth_app/monday.mdx new file mode 100644 index 00000000..a7c14f04 --- /dev/null +++ b/docs/knowledge-base/oauth_app/monday.mdx @@ -0,0 +1,89 @@ +--- +title: "Setting Up Monday.com OAuth App" +description: "Complete guide to creating and configuring a Monday.com OAuth application" +--- + +## Prerequisites + +- Monday.com account (admin access recommended) + +## Step 1: Create a Monday App & Get Credentials + +1. Log in to [Monday.com](https://monday.com) +2. Click your **avatar (top-right)** → **Developers** +3. In the Developer Console, click **Create App** +4. Once the app is created, you can immediately see the **Client ID** and **Client Secret** +5. Copy both and keep them safe + +Monday App Creation +Monday App Creation + + + +You now have your Monday.com **Client ID** and **Client Secret** ready for integration with Klavis AI. + + +## Step 2: Configure OAuth & Permissions + +Klavis Monday MCP Server typically uses: +`users:read, boards:read, boards:write, updates:write` + + +1. Open your app → go to **OAuth & Permissions** + +2. Under **Scopes**, select the permissions your app requires. + +OAuth & Permissions + +3. Under **Redirect URLs**, add: + + ``` + https://api.klavis.ai/oauth/monday/callback + ``` +OAuth & Permissions + + +## Step 3: Promote to Live + +1. Once everything is set up, click **Promote to Live**. +Your app will now have permanent credentials and can be used in production + +Promote to Live + + +You have successfully created Monday.com OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 4: White Labeling + + +White labeling lets you use your own Monday.com OAuth app branding instead of Klavis AI’s default. + + +To use your own OAuth app: + +1. Go to [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) + +2. Enter your **Monday Client ID** and **Client Secret** from Step 1 + +3. Confirm the redirect URI: + + ``` + https://api.klavis.ai/oauth/monday/callback + ``` + +4. Start the OAuth flow with your client ID: + + ```javascript + const authUrl = `https://api.klavis.ai/oauth/monday/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + ``` + + +For detailed examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources + +- [Monday.com Docs](https://developer.monday.com/apps) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) diff --git a/docs/knowledge-base/oauth_app/moneybird.mdx b/docs/knowledge-base/oauth_app/moneybird.mdx new file mode 100644 index 00000000..5fa27d77 --- /dev/null +++ b/docs/knowledge-base/oauth_app/moneybird.mdx @@ -0,0 +1,111 @@ +--- +title: "Setting Up Moneybird OAuth App" +description: "Complete guide to creating and configuring a Moneybird OAuth application" +--- + +## Prerequisites + +- Moneybird Account + +## Step 1: Registration of your application + +1. Visit [https://moneybird.com/user/applications/new](https://moneybird.com/user/applications/new) +2. Enter a Good Name +3. Add callback URL: `https://api.klavis.ai/oauth/moneybird/callback` +4. Click Save. + +Moneybird App Creation Form + +From This, You will Get Your Client ID and Client secret. + +Moneybird App Creation Form + + +## Step 2: Request Scopes + + +Klavis Moneybird MCP Server uses the following OAuth scopes: `sales_invoices, documents, estimates, bank, time_entries, settings` + + +When redirecting a user to the Moneybird authorization page, include the scope parameter in your URL. Multiple scopes should be space-separated. + +Example authorization URL: +``` +curl -vv \ + 'https://moneybird.com/oauth/authorize?client_id=9a833de2d13b07dfdfb50a8124b148d8&redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=code&scope=extimates%20bank' +``` + + +You have successfully created a Moneybird OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 3: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Moneybird OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Moneybird **Client ID** and **Client Secret** from Step 1 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/moneybird/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Moneybird OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/moneybird/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Moneybird OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Moneybird, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Moneybird OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.MONEYBIRD, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Moneybird OAuth Documentation](https://developer.moneybird.com/authentication/) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/oauth-scopes.mdx b/docs/knowledge-base/oauth_app/oauth-scopes.mdx new file mode 100644 index 00000000..7aa06d3a --- /dev/null +++ b/docs/knowledge-base/oauth_app/oauth-scopes.mdx @@ -0,0 +1,370 @@ +--- +title: "OAuth Scopes Reference" +--- + +## Overview + +This page provides a comprehensive reference of OAuth scopes required for each supported app in Klavis AI. + + +These are the minimum OAuth scopes required for all tools in each Klavis MCP server to function properly. + + +## Supported OAuth App + + + + ``` + data.records:read + data.records:write + data.recordComments:read + data.recordComments:write + schema.bases:read + schema.bases:write + user.email:read + ``` + + + + ``` + goals:read + project_templates:read + projects:read + projects:write + projects:delete + stories:read + task_templates:read + tasks:read + tasks:write + tasks:delete + teams:read + users:read + workspaces:read + workspaces.typeahead:read + ``` + + + + + Attio uses API key authentication rather than OAuth scopes. No specific scopes are required for this integration. + + + + + ``` + app:read + app:write + asset:read + asset:write + brandtemplate:content:read + brandtemplate:meta:read + comment:read + comment:write + design:content:read + design:content:write + design:meta:read + design:permission:read + design:permission:write + folder:read + folder:write + folder:permission:read + folder:permission:write + profile:read + ``` + + + + + ClickUp uses OAuth 2.0 with client credentials. The specific scopes are managed through the ClickUp app configuration rather than explicit scope parameters. + + + + + ``` + all.full_access + offline_access + ``` + + + + ``` + write:space.permission:confluence + write:space:confluence + read:attachment:confluence + read:page:confluence + write:page:confluence + search:confluence + read:space:confluence + read:hierarchical-content:confluence + write:confluence-content + read:confluence-content.all + read:confluence-content.summary + read:confluence-space.summary + write:confluence-space + write:confluence-file + read:confluence-props + write:confluence-props + manage:confluence-configuration + read:confluence-content.permission + read:confluence-user + read:confluence-groups + write:confluence-groups + readonly:content.attachment:confluence + read:me + read:account + report:personal-data + offline_access + ``` + + + + ``` + account_info.read + files.metadata.read + files.metadata.write + files.content.read + files.content.write + file_requests.read + file_requests.write + sharing.read + sharing.write + contacts.read + contacts.write + ``` + + + + ``` + repo + read:user + read:org + security_events + ``` + + + + ``` + https://www.googleapis.com/auth/gmail.readonly + https://www.googleapis.com/auth/gmail.send + https://www.googleapis.com/auth/gmail.compose + https://www.googleapis.com/auth/gmail.modify + ``` + + + + ``` + https://www.googleapis.com/auth/calendar.readonly + https://www.googleapis.com/auth/calendar.events + ``` + + + + ``` + https://www.googleapis.com/auth/drive + ``` + + + + ``` + https://www.googleapis.com/auth/drive + ``` + + + + ``` + https://www.googleapis.com/auth/drive + ``` + + + + ``` + account-info.security.read + accounting + cms.domains.read + cms.domains.write + crm.export + crm.import + crm.lists.read + crm.lists.write + crm.objects.companies.read + crm.objects.companies.write + crm.objects.contacts.read + crm.objects.contacts.write + crm.objects.deals.read + crm.objects.deals.write + crm.objects.marketing_events.read + crm.objects.marketing_events.write + crm.objects.owners.read + crm.objects.quotes.read + crm.objects.quotes.write + crm.schemas.companies.read + crm.schemas.companies.write + crm.schemas.contacts.read + crm.schemas.contacts.write + oauth + settings.users.read + settings.users.write + tickets + timeline + e-commerce + crm.objects.custom.read + crm.schemas.custom.read + content + sales-email-read + ``` + + + + ``` + read:jira-user + read:jira-work + write:jira-work + manage:jira-configuration + offline_access + ``` + + + + ``` + read + write + issues:create + comments:create + timeSchedule:write + ``` + + + + ``` + openid + profile + email + w_member_social + ``` + + + + ``` + users:read + boards:read + boards:write + updates:write + ``` + + + + ``` + sales_invoices + documents + estimates + bank + time_entries + settings + ``` + + + + ``` + read_content + update_content + insert_content + read_comments + insert_comments + read_user_information_including_email_addresses + ``` + + + + ``` + openid + profile + email + offline_access + Files.ReadWrite.All + User.Read + ``` + + + + ``` + com.intuit.quickbooks.accounting + com.intuit.quickbooks.payment + openid + ``` + + + + ``` + api + refresh_token + offline_access + ``` + + + + **Bot Scopes:** + ``` + app_mentions:read + channels:history + channels:read + chat:write + chat:write.customize + commands + files:read + groups:read + groups:write + im:history + im:read + mpim:read + reactions:read + reactions:write + team:read + users:read + ``` + + **User Scopes:** + ``` + channels:history + channels:read + channels:write + chat:write + groups:history + groups:read + groups:write + im:history + im:read + im:write + mpim:history + mpim:read + users:read + users:write + search:read + ``` + + + + + Supabase uses OAuth 2.0 with client credentials. The specific scopes are managed through the Supabase project configuration rather than explicit scope parameters. + + + + + ``` + global + ``` + + + + ``` + accounting.transactions.read + accounting.transactions + offline_access + ``` + + + +## Need Help? + + +If you need assistance with OAuth scope configuration for any specific application, please refer to the individual OAuth app setup guides or contact our support team. + \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/onedrive.mdx b/docs/knowledge-base/oauth_app/onedrive.mdx new file mode 100644 index 00000000..7e2c13f8 --- /dev/null +++ b/docs/knowledge-base/oauth_app/onedrive.mdx @@ -0,0 +1,128 @@ +--- +title: "Setting Up OneDrive OAuth App" +description: "Complete guide to creating and configuring a OneDrive OAuth application" +--- + +## Prerequisites + +- Microsoft account (personal or organizational) +- Access to [Azure Portal](https://portal.azure.com/) + +## Step 1: Create Microsoft App Registration + +1. Visit [https://portal.azure.com/](https://portal.azure.com/) and sign in +2. Search **App registrations** → click **New registration** +3. Fill out the form: + + * **Name**: Choose a descriptive app name + * **Supported account types**: Select **Accounts in any organizational directory and personal Microsoft accounts** + * **Redirect URI**: `https://api.klavis.ai/oauth/onedrive/callback` +4. Click **Register** + +OneDrive App Registration Form + +## Step 2: Configure API Permissions + + +Klavis OneDrive MCP Server uses the following OAuth scopes:`openid, profile, email, offline_access, Files.ReadWrite.All, User.Read` + + +1. Go to your app → **API Permissions** +2. Click **Add a permission** → **Microsoft Graph** → **Delegated permissions** +3. Select: + + * `openid`, `profile`, `email`, `offline_access` + * `Files.ReadWrite.All` + * `User.Read` +4. Click **Add permissions** +5. (Optional) Click **Grant admin consent** if you want to approve for all org users + +Graph API Permissions + +## Step 3: Collect Client ID & Secret + +1. Go to **Certificates & Secrets** tab +2. Click **New client secret** → set description + expiry → **Add** +3. Copy the **Client Secret Value** (shown only once) +4. From **Overview** tab, copy **Application (client) ID** + +Client ID + +Secret + + +You have successfully created a OneDrive OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## (Optional) Step 4: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own OneDrive OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your OneDrive **Client ID** and **Client Secret** from Step 3 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/onedrive/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating OneDrive OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/onedrive/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating OneDrive OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Onedrive, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating OneDrive OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.ONEDRIVE, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources + +* [Microsoft Identity Platform Docs](https://learn.microsoft.com/en-us/azure/active-directory/develop/v2-oauth2-auth-code-flow) +* [Microsoft Graph API – OneDrive](https://learn.microsoft.com/en-us/graph/onedrive-concept-overview) +* [Klavis OAuth & White Labeling Guide](/auth/white-label) +* [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) diff --git a/docs/knowledge-base/oauth_app/quickbooks.mdx b/docs/knowledge-base/oauth_app/quickbooks.mdx new file mode 100644 index 00000000..0feb450c --- /dev/null +++ b/docs/knowledge-base/oauth_app/quickbooks.mdx @@ -0,0 +1,176 @@ +--- +title: "Setting Up QuickBooks OAuth App" +description: "Complete guide to creating and configuring a QuickBooks OAuth application" +--- + +## Prerequisites + +- Intuit Developer account (free to sign up) +- Access to Intuit Developer Platform + +## Step 1: Create Intuit Developer Account & Workspace + +1. Visit [https://developer.intuit.com](https://developer.intuit.com) and sign in with your Intuit credentials + +Intuit Developer Platform Homepage + +2. Click **"Create a workspace"** to set up your development environment + +Workspaces Overview + +3. Fill out the workspace creation form with three steps: + - **Step 1 - Basic Information**: Enter workspace name and description + - **Step 2 - Company Information**: Provide your company details + - **Step 3 - Contact Information**: Add your contact information + +Create Workspace Dialog + +Company Information Form + +Contact Information Form + +Contact Information Filled + +4. Click **"Create workspace"** to complete the setup + +Workspace Dashboard + +## Step 2: Create QuickBooks OAuth Application + +1. After workspace creation, click **"Create an app"** to start building your QuickBooks integration + +Create App Dialog + +2. In the app creation dialog, select app type and configure basic information: + - **Choose "QuickBooks Online"** as your app type + - **Enter your app name** (avoid using "Intuit" or "QuickBooks" in the name) + - **Provide app description** + +App Basic Information + +## Step 3: Configure OAuth Permissions + + +Klavis QuickBooks MCP Server uses the following OAuth scopes: `com.intuit.quickbooks.accounting` (for full accounting data access) and `com.intuit.quickbooks.payment` (for payment processing) + + +1. In the permissions configuration step, select the required QuickBooks scopes: + - **com.intuit.quickbooks.accounting** - For accessing accounting data (customers, invoices, items, etc.) + - **com.intuit.quickbooks.payment** - For payment processing capabilities + +Permissions Setup + +2. Review and confirm the permissions by clicking **"Confirm"** + +Permissions Confirmed + +## Step 4: Access Your OAuth Credentials + +1. Once your app is created, you'll see the success page with your development credentials + +App Created Successfully + +2. Navigate to **"Keys and credentials"** in the left sidebar to view your OAuth credentials: + - **Client ID**: Your OAuth application identifier + - **Client Secret**: Your OAuth application secret (keep this secure) + +Keys and Credentials Page + +3. Configure your redirect URIs: + - For Klavis integration: `https://api.klavis.ai/oauth/quickbooks/callback` + - For local development: `http://localhost:3000/oauth/quickbooks/callback` + + +You have successfully created a QuickBooks OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + + +**Klavis handles all token management automatically** - we securely store and manage your OAuth tokens so you maintain seamless access to your QuickBooks data without any interruption. + + +## (Local Testing) Step 5: Testing Your Integration + +1. Open the Intuit API Playground: https://developer.intuit.com/app/developer/playground +2. Select your app and environment (Development) +3. Click "Get Access Token" to authorize and retrieve the token +4. Copy both the Access Token and the Realm ID for testing API calls + +Intuit API Playground - Get Access Token and Realm ID + + +Use the Access Token as the Bearer token in Authorization header and the Realm ID as the companyId for API requests. + + +## (Optional) Step 6: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own QuickBooks OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your QuickBooks **Client ID** and **Client Secret** from Step 4 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/quickbooks/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating QuickBooks OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/quickbooks/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating QuickBooks OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Quickbooks, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating QuickBooks OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.QUICKBOOKS, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [QuickBooks Online API Documentation](https://developer.intuit.com/app/developer/qbo/docs/api/accounting/all-entities/account) +- [QuickBooks OAuth 2.0 Guide](https://developer.intuit.com/app/developer/qbo/docs/develop/authentication-and-authorization/oauth_2.0) +- [Intuit Developer Platform](https://developer.intuit.com) +- [QuickBooks API Explorer](https://developer.intuit.com/app/developer/qbo/docs/api/accounting/all-entities/account) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) diff --git a/docs/knowledge-base/oauth_app/salesforce.mdx b/docs/knowledge-base/oauth_app/salesforce.mdx new file mode 100644 index 00000000..44216a94 --- /dev/null +++ b/docs/knowledge-base/oauth_app/salesforce.mdx @@ -0,0 +1,149 @@ +--- +title: "Setting Up Salesforce OAuth App" +description: "Complete guide to creating and configuring a Salesforce OAuth application" +--- + +## Prerequisites + +- Salesforce account (personal or business) +- Access to Salesforce Developer Portal + +## Step 1: Create Salesforce Developer Account + +1. Visit [https://developer.salesforce.com/](https://developer.salesforce.com/) +2. Click **"Sign Up"** or **"Login"** if you already have an account +3. Sign in with your Salesforce account or create a new developer account + +Salesforce Developer Login + + +## Step 2: Enable Connected Apps + +1. Once logged in, go to **"Setup Menu"** (Gear Icon) and click **"Setup"** +2. Search **"External Client Apps"** in the **"Quick Find"** search box +3. In **"External Client App Settings"**, enable **"Allow creation of connected apps"** +4. Click **"New Connected App"** + +Salesforce App Creation + +## Step 3: Fill Basic App Information + +1. Fill the necessary app details: + - **Connected App Name**: Your application name (e.g., your brand name) + - **API Name**: Auto-generated from app name (only letters, numbers, and underscores allowed) + - **Contact Email**: Your contact email for Salesforce support + - **Contact Phone**: Your contact phone for Salesforce support + - **Logo Image URL**: (Optional) HTTPS URL for your app logo (max 100 KB, preferably under 20 KB) + - **Info URL**: (Optional) Web page with more information about your app + - **Description**: (Optional) Up to 256 characters describing your app + + Basic App Information + + +Klavis Salesforce MCP Server uses the following OAuth scopes: `api,refresh_token,offline_access` + + +1. In the **API (Enable OAuth Settings)** section: + - Select **"Enable OAuth Settings"** + - **Callback URL**: Enter `https://api.klavis.ai/oauth/salesforce/callback` + +2. **Select OAuth Scopes**: Move required scopes from **"Available OAuth Scopes"** to **"Selected OAuth Scopes"**: + - `Manage User Data via APIs (api)` - required to manage user data via APIs + - `Perform requests on your behalf at any time (refresh_token, offline_access)` - required to perform requests at any time + +3. **Additional Settings**: + - Enable **"Require Secret for Web Server Flow"** if your app can keep the client secret confidential + - Enable **"Require Secret for Refresh Token Flow"** + - Enable **"Enable Authorization Code and Credentials Flow"** + - Disable **"Require PKCE Extension for Supported Authorization Flows"** + +4. Click **"Save"** to create the app + +## Step 4: Get Consumer Key and Secret + +After creating the app, follow these steps to get the credentials. + +1. From dashboard, go to **"Setup Menu"** (Gear Icon) and click **"Setup"** +2. Search **"App Manager"** in the **"Quick Find"** search box +3. Find your connected app in the list and click the dropdown arrow, then select **"View"** + +View Connected Apps + +4. In the **API (Enable OAuth Settings)** section, click **"Manage Consumer Details"** +5. Verify your identity using the **verification code** sent to your email +6. Copy the **Consumer Key** and **Consumer Secret** (keep them secure!) + +Get Consumer Key and Secret + +## (Optional) Step 5: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Salesforce OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Salesforce **Consumer Key** and **Consumer Secret** from Step 5 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/salesforce/callback` or your custom callback URL +4. **Initiate OAuth**: Use your Client Id when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Salesforce OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/salesforce/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Salesforce OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Salesforce, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Salesforce OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.SALESFORCE, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Salesforce Developer Documentation](https://developer.salesforce.com/docs) +- [Salesforce OAuth Authentication Guide](https://developer.salesforce.com/docs/atlas.en-us.api_streaming.meta/api_streaming/code_sample_auth_oauth.htm) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) +- [Salesforce API Scopes Reference](https://developer.salesforce.com/docs/platform/mobile-sdk/guide/oauth-scope-parameter-values.html) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/slack.mdx b/docs/knowledge-base/oauth_app/slack.mdx new file mode 100644 index 00000000..20423345 --- /dev/null +++ b/docs/knowledge-base/oauth_app/slack.mdx @@ -0,0 +1,79 @@ +--- +title: "Setting Up Slack OAuth App" +description: "Complete guide to creating and configuring a Slack OAuth application" +--- + +## Prerequisites + +- Slack Account + +## Step 1: Create Slack Developer Account & App + +1. Visit [https://api.slack.com/apps](https://api.slack.com/apps) +2. On the **Your Apps** page, select **Create New App**. +3. Select **From Scratch**. +4. Enter your **App Name**. +5. Select the **Workspace** where you'll be developing your app. You'll be able to distribute your app to other workspaces later if you choose. +6. Select **Create App**. + +Slack App Creation Form + +## Step 2: Configure OAuth Settings + +1. Go to the **"OAuth & Permissions"** tab in your application dashboard +2. Add redirect URLs: `https://api.klavis.ai/oauth/slack/callback` +3. Click **"Save URLs"** + +Here is an example of Klavis AI OAuth app configuration: + +OAuth Settings Configuration + +## Step 3: Request Scopes + + +Klavis Slack MCP Server uses the following OAuth scopes: + + **Bot Scopes:** `app_mentions:read, channels:join, chat:write, im:history, reactions:read, reactions:write` + + **User Scopes:** `channels:history, channels:read, channels:write.invites, chat:write, groups:history, groups:read, groups:write.invites, im:history, im:read, im:write, mpim:history, mpim:read, mpim:write, search:read, users:read` + + +1. Go to the **"OAuth & Permissions"** tab in your application dashboard. +2. Scroll Down to **"Scopes"**, and add scopes + +Scopes Settings Configuration + +You have successfully created a Slack OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +## Step 4: Configure App Distribution + +1. Go to the **"Manage Distribution"** tab in your application dashboard +2. Configure your app's public distribution settings as needed + +Slack App Distribution Settings + +## (Optional) Step 5: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Slack OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Slack **Client ID** and **Client Secret**. +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/slack/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + ```javascript + const authUrl = `https://api.klavis.ai/oauth/slack/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + ``` + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Slack Quickstart](https://api.slack.com/quickstart) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) \ No newline at end of file diff --git a/docs/knowledge-base/oauth_app/xero.mdx b/docs/knowledge-base/oauth_app/xero.mdx new file mode 100644 index 00000000..c32fc084 --- /dev/null +++ b/docs/knowledge-base/oauth_app/xero.mdx @@ -0,0 +1,147 @@ +--- +title: "Setting Up Xero OAuth App" +description: "Complete guide to creating and configuring a Xero OAuth application" +--- + +## Prerequisites + +- Xero account (personal or business) +- Access to Xero Developer Portal + +## Step 1: Create Xero Developer Account + +1. Visit [https://developer.xero.com/](https://developer.xero.com/) +2. Click **"Get started for free"** or **"Login"** if you already have an account +3. Sign in with your Xero account or create a new developer account + +Xero Developer Login + +## Step 2: Create a New App + +1. Once logged in, go to your developer dashboard +2. Click **"New app"** or **"Create an app"** +3. Choose **"Web App"** as the integration type +4. Fill in the app details: + - **App name**: Your application name (e.g., your brand name) + - **Company or application URL**: Your company website + - **Privacy policy URL**: Your privacy policy URL + - **Terms of service URL**: Your terms of service URL + +Xero App Creation Form + + +Normally, the redirect URI should be set to: `https://api.klavis.ai/oauth/xero/callback` + + +## Step 3: Configure OAuth Settings + + +Klavis Xero MCP Server uses the following OAuth scopes: `accounting.transactions.read accounting.transactions offline_access` + + +1. **Redirect URIs**: Add your callback URL: + - `https://api.klavis.ai/oauth/xero/callback` + +2. **Scopes**: Select the scopes your application needs: + - `offline_access` (required for refresh tokens) + - `accounting.transactions.read` (for reading transaction data) + - `accounting.transactions` (for transaction operations) + - Add any additional scopes based on your needs + +OAuth Settings and Connection Configuration + + +You can connect up to 25 organisations to uncertified apps. [Read more about uncertified app limits](https://developer.xero.com/guides/oauth2/limits/). + + +## Step 4: Get Your Credentials + +After creating the app, you'll see: +- **Client ID**: Copy this value +- **Client Secret**: Generate and copy this value (keep it secure!) + + +You have successfully created a Xero OAuth application! You now have your Client ID and Client Secret ready for integration with Klavis AI. + + +### Xero Token Expiration +- **Access Tokens**: Expire after 30 minutes +- **Refresh Tokens**: Expire after 60 days (rolling expiration - resets when used) + + +**Klavis handles all token management automatically** - we refresh your tokens before they expire so you maintain seamless access to your Xero data without any interruption. + + +## (Optional) Step 5: White Labeling + + +White labeling allows you to customize the OAuth experience with your own branding instead of Klavis AI's. + + +If you want to use your own Xero OAuth application with custom branding: + +1. **Configure White Labeling**: Go to [https://www.klavis.ai/home/white-label](https://www.klavis.ai/home/white-label) +2. **Add Your Credentials**: Enter your Xero **Client ID** and **Client Secret** from Step 4 +3. **Set Redirect URI**: Use `https://api.klavis.ai/oauth/xero/callback` or your custom callback URL +4. **Initiate OAuth**: Use your client ID when starting the OAuth flow: + + + + ```javascript without SDK + // Example: Initiating Xero OAuth with white-label + const authUrl = `https://api.klavis.ai/oauth/xero/authorize?instance_id=${instanceId}&client_id=${yourClientId}`; + window.location.href = authUrl; + ``` + + ```typescript TypeScript SDK + import { Klavis } from "@klavis/sdk"; + + const klavis = new Klavis({ + apiKey: "YOUR_API_KEY" + }); + + // Example: Initiating Xero OAuth with white-label + const oauthUrl = await klavis.mcpServer.getOAuthUrl({ + serverName: Klavis.McpServerName.Xero, + instanceId: instanceId, + clientId: yourClientId, + // redirectUri: YOUR_REDIRECT_URI, + // scope: "YOUR_SCOPES", + }); + + window.location.href = oauthUrl; + ``` + + ```python Python SDK + import webbrowser + from klavis import Klavis + from klavis.types import McpServerName + + klavis = Klavis(api_key="YOUR_API_KEY") + + # Example: Initiating Xero OAuth with white-label + oauth_url = klavis.mcp_server.get_oauth_url( + server_name=McpServerName.XERO, + instance_id=instance_id, + client_id=your_client_id, + # redirect_uri="YOUR_REDIRECT_URI", + # scope="YOUR_SCOPES" + ) + + # Open OAuth URL in user's default browser + webbrowser.open(oauth_url) + ``` + + + + +For detailed white labeling implementation and code examples, see our [OAuth & White Labeling guide](/auth/white-label). + + +## Resources +- [Xero Developer Documentation](https://developer.xero.com/) +- [Xero OAuth 2.0 Authentication Guide](https://developer.xero.com/guides/oauth2/overview/) +- [Klavis OAuth & White Labeling Guide](/auth/white-label) +- [Klavis White Label Dashboard](https://www.klavis.ai/home/white-label) +- [Xero API Scopes Reference](https://developer.xero.com/guides/oauth2/scopes/) +- [Xero OAuth Limits for Uncertified Apps](https://developer.xero.com/guides/oauth2/limits/) \ No newline at end of file diff --git a/docs/knowledge-base/onboarding/create-your-first-mcp-server.mdx b/docs/knowledge-base/onboarding/create-your-first-mcp-server.mdx new file mode 100644 index 00000000..642aa68b --- /dev/null +++ b/docs/knowledge-base/onboarding/create-your-first-mcp-server.mdx @@ -0,0 +1,51 @@ +--- +title: "Create Your First MCP Server" +--- + +## Quick Start Guide + +Create your first MCP server in 10 seconds using Gmail as an example. + + + + Navigate to the [Klavis Dashboard](https://www.klavis.ai/home) and click **"MCP Server"** in the left sidebar. + + Find the MCP server you like, here we use Gmail one as example. + + ![Klavis MCP Servers - No Code Creation](/images/mcp-server.png) + + + You'll get a server URL and OAuth URL for authentication. Keep these handy for the next steps. + + + + + Once you hit the create button, it will automatically redirect you to the OAuth flow. + + ![Gmail OAuth Flow](/images/knowledge-base/onboarding/gmail_oauth.png) + + + - Sign in to your Google account + - Grant Gmail permissions to Klavis + - You'll be redirected back automatically + - Your MCP server is now ready to use + + + Once OAuth is complete, your Gmail MCP server can read, send, and manage your emails. + + + + + +**Congratulations!** You've successfully created your first MCP server. + +## Next Steps +Ready to start using it? Continue to [Use Your First MCP Server](/knowledge-base/onboarding/use-your-first-mcp-server). diff --git a/docs/knowledge-base/onboarding/use-your-first-mcp-server.mdx b/docs/knowledge-base/onboarding/use-your-first-mcp-server.mdx new file mode 100644 index 00000000..ff86e568 --- /dev/null +++ b/docs/knowledge-base/onboarding/use-your-first-mcp-server.mdx @@ -0,0 +1,58 @@ +--- +title: "Use Your First MCP Server" +--- + +## Quick Start Guide + +Use your MCP server in OpenAI Playground (for free) in 2 simple steps. + + + + Go to [OpenAI Playground](https://platform.openai.com/chat/edit?models=gpt-4.1), then navigate to **Tools** → **Add** → **MCP Server**. + + ![Choose MCP Server](/images/knowledge-base/onboarding/step1_choose_mcp_server.png) + + + + + Enter your server configuration details and connect. + + ![Configure MCP Server](/images/knowledge-base/onboarding/step2_config_mcp_Server.png) + + + You can go to [Klavis Dashboard](https://www.klavis.ai/home) → **MCP Server** → **Manage Instances** to get your server URL if you forgot. + + ![Klavis Manage Instances](/images/knowledge-base/onboarding/klavis_manage_instances.png) + + + + + + + +**Congratulations!** You've successfully used your first MCP server. + +## What You Can Do with Gmail MCP Server + + + + Fetch and analyze your latest emails with AI assistance + + + Compose and send emails through AI agents + + + Find specific emails using natural language queries + + + Organize emails with labels and folders automatically + + diff --git a/docs/knowledge-base/use-mcp-server/chatgpt.mdx b/docs/knowledge-base/use-mcp-server/chatgpt.mdx new file mode 100644 index 00000000..2fd76f97 --- /dev/null +++ b/docs/knowledge-base/use-mcp-server/chatgpt.mdx @@ -0,0 +1,207 @@ +--- +title: "ChatGPT Connectors" +description: "Connect Strata with ChatGPT Connectors in minutes and supercharge your AI coding experience" +--- +ChatGPT +ChatGPT + +## Quick Setup Guide + + + + Navigate to the [Klavis home page](https://www.klavis.ai/home) and click **"MCP Server"** in the left sidebar. You will see a list of MCP servers available in Klavis. + + Klavis home page + Click the **"Authorize"** button next to your chosen server. Once server is authorized, you will see **Green Checkmark** status. + + Authorize MCP Servers + + + You can authorize one or more servers to use with Strata + + + + + + + From Klavis Dashboard, click **Add to Other Clients** button to get your Strata Server URL. + + Copy Strata Server URL + + **Copy** the strata URL to clipboard - you'll need this in the next step. + + Copy Strata Server URL + + + + - Automatically redirected to OAuth authorization + - Sign in to your account (GitHub, Google, Slack, etc.) + - Grant necessary permissions + - Redirected back to Klavis automatically + + + + - Prompted to enter an API key + - Follow service-specific instructions to generate key + - Paste key in the provided field + - Click **"Save"** to continue + + + + + + + To connect Klavis MCP Server with Connectors, you need to ensure **Developer Mode** is enabled. + - **Settings** -> **Apps & Connectors** -> **Advanced Settings** -> **Developer Mode** (enable it!) + + Developer Mode in ChatGPT + + **NOTE**: Developer Mode is required to add custom MCP servers in ChatGPT Connectors. See below screenshot for comparison: + + Disabled Developer Mode vs Enabled Developer Mode + + + + + Open a new chat in ChatGPT: + - Click on '**+**' icon or type `/` + - Select **Add Sources** > **Connect more** + + Now, fill the essential detail to add new connector: + - **Icon** (Optional): Add icon for your MCP (128 * 128 recommended) + - **Name**: `klavis-strata` + - **Description** (Optional): Short description of your MCP + - **MCP Server URL**: Paste Strata Server URL + - **Authentication**: select **No Authentication** + + Tick 'I trust this application' and click **Create** button + Adding Strata Server in ChatGPT Connector + + + Klavis AI authenticates your Strata server while generating server url. + + + + + + + To verify if Strata is configured properly in ChatGPT chat, click **"+"** -> **More** and you will see active servers. + + Strata Configuration in ChatGPT + + To verify server tools: + - Go to **Setting** > **Apps & Connectors** + - Select your MCP > Scroll to **Actions** tab + + Verify Tools Loading in ChatGPT + + + + + Using MCP Server in ChatGPT Chat + + Open ChatGPT Chat and start using natural language: + + + ```text GitHub + "Create a new issue titled 'Add dark mode' with priority label. Use Tools" + ``` + + ```text Slack + "Send a message to #general: 'Standup meeting in 5 minutes! Use my MCP server'" + ``` + + ```text Gmail + "Send email to john@company.com about project update. Use Tools" + ``` + + ```text Notion + "Create a new page called 'Meeting Notes' with today's date. Use my MCP server" + ``` + + + + šŸŽÆ ChatGPT Chat automatically detects when to use **Strata** based on context - no need to specify explicitly! + + + + + You're all set! Your **Strata** MCP server is now integrated with ChatGPT + + + +## Troubleshooting + + + + - Double-check your Server URL for typos + - Ensure stable internet connection + - Verify authentication in Klavis dashboard + - Try completely restarting ChatGPT + + + + - Re-authenticate in the Klavis dashboard + - Check if your OAuth tokens have expired + - Verify API key permissions (for API key services) + - Ensure you've granted all necessary permissions + + + + - Limit the number of active MCP servers + - Check your internet connection speed + - Restart ChatGPT periodically + - Contact support if issues persist + + + +## Need Help? + + + + Join our Discord for community support and discussions + + + Contact our technical support team for assistance + + + +--- \ No newline at end of file diff --git a/docs/knowledge-base/use-mcp-server/claude_code.mdx b/docs/knowledge-base/use-mcp-server/claude_code.mdx new file mode 100644 index 00000000..151fa084 --- /dev/null +++ b/docs/knowledge-base/use-mcp-server/claude_code.mdx @@ -0,0 +1,195 @@ +--- +title: "Claude Code" +description: "Connect Strata with Claude Code in minutes and supercharge your AI coding experience" +--- +Claude Code +Claude Code + +## Quick Setup Guide + + + + Navigate to the [Klavis home page](https://www.klavis.ai/home) and click **"MCP Server"** in the left sidebar. You will see a list of MCP servers available in Klavis. + + Klavis home page + Click the **"Authorize"** button next to your chosen server. Once server is authorized, you will see **Green Checkmark** status. + + Authorize MCP Servers + + + You can authorize one or more servers to use with Strata + + + + + + + From Klavis Dashboard, click **"Add to Other Clients"** button to get your Strata Server URL. + + Klavis Dashboard + + **Copy** the strata URL to clipboard - you'll need this in the next step. + + Copy Strata Server URL + + + - Automatically redirected to OAuth authorization + - Sign in to your account (GitHub, Google, Slack, etc.) + - Grant necessary permissions + - Redirected back to Klavis automatically + + + + - Prompted to enter an API key + - Follow service-specific instructions to generate key + - Paste key in the provided field + - Click **"Save"** to continue + + + + + + + Copy Your Server URL, open terminal and run the below command as shown in the screenshot below: + ```bash + claude mcp add klavis-strata npx mcp-remote + ``` + Claude Code command to add Strata + + + You can add **Single** MCP Server URL directly here as well: + + 1. Navigate to **Target Server** and click **Three Dots** next to your server + 2. Tap **Show Individual Server URL** and copy the URL + 3. Paste this URL in Claude Code as described above + + + + + + + + To verify Strata server is configured properly, open Claude Code and run `/mcp` command. + + MCP Configuration in Claude Code + + To view available tools in the MCP server, navigate to Strata server and click `View Tools`. + + View Tools in Claude Code + + + šŸ“‹ **Restart Claude Code** in terminal to apply the new configuration + + + + + + Using MCP Server in Claude Code Chat + + Open Claude Code Chat and start using natural language: + + + ```text GitHub + "Create a new issue titled 'Add dark mode' with priority label" + ``` + + ```text Slack + "Send a message to #general: 'Standup meeting in 5 minutes!'" + ``` + + ```text Gmail + "Send email to john@company.com about project update" + ``` + + ```text Notion + "Create a new page called 'Meeting Notes' with today's date" + ``` + + + + šŸŽÆ Claude Code Chat automatically detects when to use **Strata** based on context - no need to specify explicitly! + + + + + + - Remove MCP Server: `claude mcp remove klavis-strata` + - List all MCP Servers: `claude mcp list` + - Details of Strata: `claude mcp get klavis-strata` + + + + + You're all set! Your **Strata** MCP server is now integrated with Claude Code. + + + +## Troubleshooting + + + + - Double-check your Server URL for typos + - Ensure stable internet connection + - Verify authentication in Klavis dashboard + - Try completely restarting Claude Code + + + + - Re-authenticate in the Klavis dashboard + - Check if your OAuth tokens have expired + - Verify API key permissions (for API key services) + - Ensure you've granted all necessary permissions + + + + - Limit the number of active MCP servers + - Check your internet connection speed + - Restart Claude Code periodically + - Contact support if issues persist + + + +## Need Help? + + + + Join our Discord for community support and discussions + + + Contact our technical support team for assistance + + + +--- \ No newline at end of file diff --git a/docs/knowledge-base/use-mcp-server/claude_web_desktop.mdx b/docs/knowledge-base/use-mcp-server/claude_web_desktop.mdx new file mode 100644 index 00000000..bbc1d13a --- /dev/null +++ b/docs/knowledge-base/use-mcp-server/claude_web_desktop.mdx @@ -0,0 +1,197 @@ +--- +title: "Claude Web/Desktop" +description: "Connect Strata with Claude Web/Desktop in minutes and supercharge your AI coding experience" +--- +Claude Web/Desktop +Claude Web/Desktop + +## Quick Setup Guide + + + + Navigate to the [Klavis home page](https://www.klavis.ai/home) and click **"MCP Server"** in the left sidebar. You will see a list of MCP servers available in Klavis. + + Klavis home page + Click the **"Authorize"** button next to your chosen server. Once server is authorized, you will see **Green Checkmark** status. + + Authorize MCP Servers + + + You can authorize one or more servers to use with Strata + + + + + + + From Klavis Dashboard, click **Add to Claude** button to get your Strata Server URL. + + Copy Strata Server URL + + **Copy** the strata URL to clipboard - you'll need this in the next step. + + + + - Automatically redirected to OAuth authorization + - Sign in to your account (GitHub, Google, Slack, etc.) + - Grant necessary permissions + - Redirected back to Klavis automatically + + + + - Prompted to enter an API key + - Follow service-specific instructions to generate key + - Paste key in the provided field + - Click **"Save"** to continue + + + + + + + Copy Your Strata Server URL, open Claude Web/Desktop settings: + - **macOS**: `Cmd + ,` or **Claude Desktop > Settings > Connectors** + - **Windows/Linux**: `Ctrl + ,` or **Claude Desktop > Settings > Connectors** + + Claude Web/Desktop Settings + + Select **"Add Custom Connector"**: + - **Name**: `strata` + - **Remote MCP Server URL**: Paste your Strata Server URL that you copied earlier + + Click **Add** to save the configuration. You can see strata added to the list. + + Strata Added in Web/Desktop Settings + + + You can add **Single** MCP Server URL directly here as well: + + 1. Navigate to **Target Server** and click **Three Dots** next to your server + 2. Tap **Show Individual Server URL** and copy the URL + 3. Paste this URL in Claude Web/Desktop settings as described above + + + + + + + + + To verify **Strata** tools are loading correctly, open new chat and click **Search and Tools** button (settings icon) and you will see **strata** enabled. + + Verify Strata Tools in Claude Web/Desktop + + Open **strata** to see the list of tools available from your Strata MCP server. + + Strata Tools List in Claude Web/Desktop + + + šŸ“‹ **Reopen Claude Web/Desktop Settings** to apply the new configuration + + + + + + + Using Strata in Claude Web/Desktop Chat + + Open Claude Web/Desktop Chat (`Cmd/Ctrl + L`) and start using natural language: + + + ```text GitHub + "Create a new issue titled 'Add dark mode' with priority label" + ``` + + ```text Slack + "Send a message to #general: 'Standup meeting in 5 minutes!'" + ``` + + ```text Gmail + "Send email to john@company.com about project update" + ``` + + ```text Notion + "Create a new page called 'Meeting Notes' with today's date" + ``` + + + + šŸŽÆ Claude Web/Desktop Chat automatically detects when to use **Strata** based on context - no need to specify explicitly! + + + + + You're all set! Your **Strata** MCP server is now integrated with Claude Web/Desktop. + + + +## Troubleshooting + + + + - Double-check your Server URL for typos + - Ensure stable internet connection + - Verify authentication in Klavis dashboard + - Check Claude Web/Desktop logs for error messages + - Try completely restarting Claude Web/Desktop + + + + - Re-authenticate in the Klavis dashboard + - Check if your OAuth tokens have expired + - Verify API key permissions (for API key services) + - Ensure you've granted all necessary permissions + + + + - Limit the number of active MCP servers + - Check your internet connection speed + - Restart Claude Web/Desktop periodically + - Contact support if issues persist + + + +## Need Help? + + + + Join our Discord for community support and discussions + + + Contact our technical support team for assistance + + + +--- diff --git a/docs/knowledge-base/use-mcp-server/cline.mdx b/docs/knowledge-base/use-mcp-server/cline.mdx new file mode 100644 index 00000000..ff9cdf36 --- /dev/null +++ b/docs/knowledge-base/use-mcp-server/cline.mdx @@ -0,0 +1,193 @@ +--- +title: "Cline" +description: "Connect Strata with Cline in minutes and supercharge your AI coding experience" +--- +Cline +Cline + +## Quick Setup Guide + + + + Navigate to the [Klavis home page](https://www.klavis.ai/home) and click **"MCP Server"** in the left sidebar. You will see a list of MCP servers available in Klavis. + + Klavis home page + Click the **"Authorize"** button next to your chosen server. Once server is authorized, you will see **Green Checkmark** status. + + Authorize MCP Servers + + + You can authorize one or more servers to use with Strata + + + + + + + From Klavis Dashboard, click **"Add to Other Clients"** button to get your Strata Server URL. + + Klavis Dashboard + + **Copy** the strata URL to clipboard - you'll need this in the next step. + + Copy Strata Server URL + + + You can add **Single** MCP Server URL directly here as well: + + 1. Navigate to **Target Server** and click **Three Dots** next to your server + 2. Tap **Show Individual Server URL** and copy the URL + 3. Paste this URL in Cline settings as described above + + + + + - Automatically redirected to OAuth authorization + - Sign in to your account (GitHub, Google, Slack, etc.) + - Grant necessary permissions + - Redirected back to Klavis automatically + + + + - Prompted to enter an API key + - Follow service-specific instructions to generate key + - Paste key in the provided field + - Click **"Save"** to continue + + + + + + + Copy Your Server URL, click **Settings** -> **Manage MCP Servers** + + Manage MCP Servers in Cline + + Tap on **"Configure MCP Servers"** to open the configuration file. + + Configure MCP Servers in Cline + + Paste Your Server URL to **`cline_mcp_settings.json`** file like the screenshot below + + Paste Strata code in cline_mcp_settings.json file of Cline + + + + + + To verify that Strata tools are loaded correctly, navigate to **Manage MCP Servers** > **Configure MCP Servers**. + + Strata Tools Configuration in Cline + + + šŸ“‹ **Reopen Cline Settings** to apply the new configuration + + + + + + Using Strata in Cline Chat + + Open Cline Chat and start using natural language: + + + ```text GitHub + "Create a new issue titled 'Add dark mode' with priority label" + ``` + + ```text Slack + "Send a message to #general: 'Standup meeting in 5 minutes!'" + ``` + + ```text Gmail + "Send email to john@company.com about project update" + ``` + + ```text Notion + "Create a new page called 'Meeting Notes' with today's date" + ``` + + + + šŸŽÆ Cline Chat automatically detects when to use **Strata** based on context - no need to specify explicitly! + + + + + You're all set! Your **Strata** MCP server is now integrated with Cline in VS Code IDE. + + + +## Troubleshooting + + + + - Double-check your Server URL for typos + - Ensure stable internet connection + - Verify authentication in Klavis dashboard + - Check VS Code logs (View → Output → Cline) for errors + - Try completely restarting VS Code IDE + + + + - Re-authenticate in the Klavis dashboard + - Check if your OAuth tokens have expired + - Verify API key permissions (for API key services) + - Ensure you've granted all necessary permissions + + + + - Limit the number of active MCP servers + - Check your internet connection speed + - Restart VS Code IDE periodically + - Contact support if issues persist + + + +## Need Help? + + + + Join our Discord for community support and discussions + + + Contact our technical support team for assistance + + + +--- \ No newline at end of file diff --git a/docs/knowledge-base/use-mcp-server/continue.mdx b/docs/knowledge-base/use-mcp-server/continue.mdx new file mode 100644 index 00000000..1a7b13ec --- /dev/null +++ b/docs/knowledge-base/use-mcp-server/continue.mdx @@ -0,0 +1,194 @@ +--- +title: "Continue" +description: "Connect Strata with Continue in minutes and supercharge your AI coding experience" +--- +Continue +Continue + +## Quick Setup Guide + + + + Navigate to the [Klavis home page](https://www.klavis.ai/home) and click **"MCP Server"** in the left sidebar. You will see a list of MCP servers available in Klavis. + + Klavis home page + Click the **"Authorize"** button next to your chosen server. Once server is authorized, you will see **Green Checkmark** status. + + Authorize MCP Servers + + + You can authorize one or more servers to use with Strata + + + + + + + From Klavis Dashboard, click **"Add to Other Clients"** button to get your Strata Server URL. + + Klavis Dashboard + + **Copy** the strata URL to clipboard - you'll need this in the next step. + + Copy Strata Server URL + + + You can add **Single** MCP Server URL directly here as well: + + 1. Navigate to **Target Server** and click **Three Dots** next to your server + 2. Tap **Show Individual Server URL** and copy the URL + 3. Paste this URL in Continue settings as described above + + + + + - Automatically redirected to OAuth authorization + - Sign in to your account (GitHub, Google, Slack, etc.) + - Grant necessary permissions + - Redirected back to Klavis automatically + + + + - Prompted to enter an API key + - Follow service-specific instructions to generate key + - Paste key in the provided field + - Click **"Save"** to continue + + + + + Copy Your Server URL, open **Configure Tools** (tools icon) -> **Tools** section + + MCP Server settings in Continue + + Click on "**+**" (plus icon) to create a new MCP server configuration for Strata + + Tools in Continue + + Paste Your Strata Server URL to **"new-mcp-server.yaml"** file like the screenshot below + + new-mcp-server.yaml file in Continue + + + + + + To verify that Strata tools are loaded in Continue: + 1. Open **Configure Tools** -> **Tools** + 2. Under **MCP Servers**, tap **klavis-strata** + 3. You should see the list of tools loaded from Strata server + + Strata Tool Configuration in Continue + + + šŸ“‹ **Reopen Continue Settings** to apply the new configuration + + + + + + Using Strata Server in Continue Chat + + Open Continue Chat (`Cmd/Ctrl + L`) and start using natural language: + + + ```text GitHub + "Create a new issue titled 'Add dark mode' with priority label" + ``` + + ```text Slack + "Send a message to #general: 'Standup meeting in 5 minutes!'" + ``` + + ```text Gmail + "Send email to john@company.com about project update" + ``` + + ```text Notion + "Create a new page called 'Meeting Notes' with today's date" + ``` + + + + šŸŽÆ Continue Chat automatically detects when to use **Strata** based on context - no need to specify explicitly! + + + + + You're all set! Your **Strata** is now integrated with Continue in VS Code IDE. + + + +## Troubleshooting + + + + - Double-check your Server URL for typos + - Ensure stable internet connection + - Verify authentication in Klavis dashboard + - Check VS Code logs (View → Output → Continue) for errors + - Try completely restarting VS Code IDE + + + + - Re-authenticate in the Klavis dashboard + - Check if your OAuth tokens have expired + - Verify API key permissions (for API key services) + - Ensure you've granted all necessary permissions + + + + - Limit the number of active MCP servers + - Check your internet connection speed + - Restart VS Code IDE periodically + - Contact support if issues persist + + + +## Need Help? + + + + Join our Discord for community support and discussions + + + Contact our technical support team for assistance + + + +--- \ No newline at end of file diff --git a/docs/knowledge-base/use-mcp-server/cursor.mdx b/docs/knowledge-base/use-mcp-server/cursor.mdx new file mode 100644 index 00000000..0076154e --- /dev/null +++ b/docs/knowledge-base/use-mcp-server/cursor.mdx @@ -0,0 +1,203 @@ +--- +title: "Cursor" +description: "Connect Strata with Cursor IDE in minutes and supercharge your AI coding experience" +--- +Cursor IDE +Cursor IDE + +## Quick Setup Guide + + + + Navigate to the [Klavis home page](https://www.klavis.ai/home) and click **"MCP Server"** in the left sidebar. You will see a list of MCP servers available in Klavis. + + Klavis home page + Click the **"Authorize"** button next to your chosen server. Once server is authorized, you will see **Green Checkmark** status. + + Authorize MCP Servers + + + You can authorize one or more servers to use with Strata + + + + + + + + + + From Klavis Dashboard, click **Add to Cursor** and you will be redirected to Cursor. + + Copy Strata Server URL + + Cursor will automatically open the **MCP** dialog with Strata Server URL pre-filled. Click **Install** to save the configuration. + + Install Strata in Cursor + + + + + Alternatively, you can configure **Strata** directly. + 1. Go to **Setting** -> **MCP & Tools** + 2. Click **Add Custom MCP** + + MCP & Tools in Cursor Settings + + 3. Paste the below code in **mcp.json** file (See Screenshot below) + + Paste Strata code in mcp.json + + + + + + + You can add **Single** MCP Server URL directly here as well: + + 1. Navigate to **Target Server** and click **Three Dots** next to your server + 2. Tap **Show Individual Server URL** and copy the URL + 3. Paste this URL in Cursor settings as described above + + + + + + + - Automatically redirected to OAuth authorization + - Sign in to your account (GitHub, Google, Slack, etc.) + - Grant necessary permissions + - Redirected back to Klavis automatically + + + + - Prompted to enter an API key + - Follow service-specific instructions to generate key + - Paste key in the provided field + - Click **"Save"** to continue + + + + + + + In Cursor Settings, navigate to **Tools & MCP**. You will see **klavis-strata** listed under **Installed MCP Servers**. + + Strata Configuration in Cursor + + Tap on **klavis-strata** to view the list of tools loaded from Strata server. + + Strata Tools List in Cursor IDE + + + šŸ“‹ **Reopen Cursor Settings** to apply the new configuration + + + + + + Using Strata in Cursor Chat + + Open Cursor Chat (`Cmd/Ctrl + L`) and start using natural language: + + + ```text GitHub + "Create a new issue titled 'Add dark mode' with priority label" + ``` + + ```text Slack + "Send a message to #general: 'Standup meeting in 5 minutes!'" + ``` + + ```text Gmail + "Send email to john@company.com about project update" + ``` + + ```text Notion + "Create a new page called 'Meeting Notes' with today's date" + ``` + + + + šŸŽÆ Cursor Chat automatically detects when to use **Strata** based on context - no need to specify explicitly! + + + + + You're all set! Your **Strata** is now integrated with Cursor IDE. + + + +## Troubleshooting + + + + - Double-check your Server URL for typos + - Ensure stable internet connection + - Verify authentication in Klavis dashboard + - Check Cursor IDE logs for error messages + - Try completely restarting Cursor IDE + + + + - Re-authenticate in the Klavis dashboard + - Check if your OAuth tokens have expired + - Verify API key permissions (for API key services) + - Ensure you've granted all necessary permissions + + + + - Limit the number of active MCP servers + - Check your internet connection speed + - Restart Cursor IDE periodically + - Contact support if issues persist + + + +## Need Help? + + + + Join our Discord for community support and discussions + + + Contact our technical support team for assistance + + + +--- \ No newline at end of file diff --git a/docs/knowledge-base/use-mcp-server/gemini_cli.mdx b/docs/knowledge-base/use-mcp-server/gemini_cli.mdx new file mode 100644 index 00000000..ff129b64 --- /dev/null +++ b/docs/knowledge-base/use-mcp-server/gemini_cli.mdx @@ -0,0 +1,179 @@ +--- +title: "Gemini CLI" +description: "Connect Strata with Gemini CLI in minutes and supercharge your AI coding experience" +--- +Gemini CLI +Gemini CLI + +## Quick Setup Guide + + + + Navigate to the [Klavis home page](https://www.klavis.ai/home) and click **"MCP Server"** in the left sidebar. You will see a list of MCP servers available in Klavis. + + Klavis home page + + Click the **"Authorize"** button next to your chosen server. Once server is authorized, you will see **Green Checkmark** status. + + Authorize MCP Servers + + + You can authorize one or more servers to use with Strata + + + + + + + From Klavis Dashboard, click **"Add to Other Clients"** button to get your Strata Server URL. + + Copy Strata Server URL + + **Copy** the strata URL to clipboard - you'll need this in the next step. + + + - Automatically redirected to OAuth authorization + - Sign in to your account (GitHub, Google, Slack, etc.) + - Grant necessary permissions + - Redirected back to Klavis automatically + + + + - Prompted to enter an API key + - Follow service-specific instructions to generate key + - Paste key in the provided field + - Click **"Save"** to continue + + + + + + + Copy Your Server URL and navigate to `~/.gemini/settings.json` on your computer + + + Paste Your Server URL to **settings.json** file like the screenshot below + + + Paste Strata Server URL in settings.json + + + You can add **Single** MCP Server URL directly here as well: + + 1. Navigate to **Target Server** and click **Three Dots** next to your server + 2. Tap **Show Individual Server URL** and copy the URL + 3. Paste this URL in Gemini CLI settings as described above + + + + + + + + To verify that Strata is properly configured in Gemini CLI, run **`/mcp`** command in Gemini CLI Chat. + + Strata Tools Loading in Gemini CLI + + + šŸ“‹ **Restart** Gemini CLI to apply the new configuration + + + + + + Using Strata in Gemini CLI Chat + + Open Gemini CLI Chat and start using natural language: + + + ```text GitHub + "Create a new issue titled 'Add dark mode' with priority label" + ``` + + ```text Slack + "Send a message to #general: 'Standup meeting in 5 minutes!'" + ``` + + ```text Gmail + "Send email to john@company.com about project update" + ``` + + ```text Notion + "Create a new page called 'Meeting Notes' with today's date" + ``` + + + + šŸŽÆ Gemini CLI Chat automatically detects when to use **Strata** based on context - no need to specify explicitly! + + + + + You're all set! Your **Strata** is now integrated with Gemini CLI. + + + +## Troubleshooting + + + + - Double-check your Server URL for typos + - Ensure stable internet connection + - Verify authentication in Klavis dashboard + - Check Gemini CLI logs for error messages + - Try completely restarting Gemini CLI + + + + - Re-authenticate in the Klavis dashboard + - Check if your OAuth tokens have expired + - Verify API key permissions (for API key services) + - Ensure you've granted all necessary permissions + + + + - Limit the number of active MCP servers + - Check your internet connection speed + - Restart Gemini CLI periodically + - Contact support if issues persist + + + +## Need Help? + + + + Join our Discord for community support and discussions + + + Contact our technical support team for assistance + + + +--- \ No newline at end of file diff --git a/docs/knowledge-base/use-mcp-server/kiro.mdx b/docs/knowledge-base/use-mcp-server/kiro.mdx new file mode 100644 index 00000000..91bfbd66 --- /dev/null +++ b/docs/knowledge-base/use-mcp-server/kiro.mdx @@ -0,0 +1,173 @@ +--- +title: "Kiro" +description: "Connect Strata with Kiro in minutes and supercharge your AI workflow experience" +--- +Kiro +Kiro + +## Quick Setup Guide + + + + Navigate to the [Klavis home page](https://www.klavis.ai/home) and click **"MCP Server"** in the left sidebar. You will see a list of MCP servers available in Klavis. + + Klavis home page + Click the **"Authorize"** button next to your chosen server. Once server is authorized, you will see **Green Checkmark** status. + + Authorize MCP Servers + + + You can authorize one or more servers to use with Strata + + + + + + + From Klavis Dashboard, click **"Add to Other Clients"** button to get your Strata Server URL. + + Klavis Dashboard + + **Copy** the strata URL to clipboard - you'll need this in the next step. + + Copy Strata Server URL + + + - Automatically redirected to OAuth authorization + - Sign in to your account (GitHub, Google, Slack, etc.) + - Grant necessary permissions + - Redirected back to Klavis automatically + + + + - Prompted to enter an API key + - Follow service-specific instructions to generate key + - Paste key in the provided field + - Click **"Save"** to continue + + + + + Copy Your Server URL and configure Kiro with your MCP server settings. + + Kiro Configuration + + + You can add **Single** MCP Server URL directly here as well: + + 1. Navigate to **Target Server** and click **Three Dots** next to your server + 2. Tap **Show Individual Server URL** and copy the URL + 3. Paste this URL in Kiro settings as described above + + + + + Kiro mcp.json file + + + šŸ“‹ **Verify Tools Loading** - tools will automatically reload + + + + + + + Open Kiro and start using natural language to interact with your connected services: + + Using Strata in Kiro Chat + + + ```text GitHub + "Create a new issue titled 'Add dark mode' with priority label" + ``` + + ```text Slack + "Send a message to #general: 'Standup meeting in 5 minutes!'" + ``` + + ```text Gmail + "Send email to john@company.com about project update" + ``` + + ```text Notion + "Create a new page called 'Meeting Notes' with today's date" + ``` + + + + šŸŽÆ Kiro automatically detects when to use **Strata** based on context - no need to specify explicitly! + + + + + You're all set! Your **Strata** MCP server is now integrated with Kiro. + + + +## Troubleshooting + + + + - Double-check your Server URL for typos + - Ensure stable internet connection + - Verify authentication in Klavis dashboard + - Check Kiro logs for error messages + - Try completely restarting Kiro + + + + - Re-authenticate in the Klavis dashboard + - Check if your OAuth tokens have expired + - Verify API key permissions (for API key services) + - Ensure you've granted all necessary permissions + + + + - Limit the number of active MCP servers + - Check your internet connection speed + - Restart Kiro periodically + - Contact support if issues persist + + + +## Need Help? + + + + Join our Discord for community support and discussions + + + Contact our technical support team for assistance + + + +--- \ No newline at end of file diff --git a/docs/knowledge-base/use-mcp-server/n8n.mdx b/docs/knowledge-base/use-mcp-server/n8n.mdx new file mode 100644 index 00000000..601fc72f --- /dev/null +++ b/docs/knowledge-base/use-mcp-server/n8n.mdx @@ -0,0 +1,251 @@ +--- +title: "n8n" +description: "Connect Strata with n8n in minutes and unlock powerful workflow automation" +--- +n8n +n8n + +## Quick Setup Guide + + + + Navigate to the [Klavis home page](https://www.klavis.ai/home) and click **"MCP Server"** in the left sidebar. You will see a list of MCP servers available in Klavis. + + Klavis home page + Click the **"Authorize"** button next to your chosen server. Once server is authorized, you will see **Green Checkmark** status. + + Authorize MCP Servers + + + You can authorize one or more servers to use with Strata + + + + + + + Klavis Dashboard consisting MCP Servers + + From Klavis Dashboard, click **"Add to Other Clients"** button to get your Strata Server URL. + + 1. Navigate to **n8n** + 2. MCP URL with custom auth headers enabled: `https://strata.klavis.ai/mcp/` (Copy it) + 3. Click on **"Generate Token"** to get **Bearer Token** (Copy token & keep it secret!) + + Copy Strata Server URL + + + - Automatically redirected to OAuth authorization + - Sign in to your account (GitHub, Google, Slack, etc.) + - Grant necessary permissions + - Redirected back to Klavis automatically + + + + - Prompted to enter an API key + - Follow service-specific instructions to generate key + - Paste key in the provided field + - Click **"Save"** to continue + + + + +## Build AI Workflow using Strata + + + + Begin your automation by creating a **New Workflow** in your n8n personal workspace. + n8n workflow creation + + + + + First Node in n8n + + Click the **Add First Step ('+' icon)** and select **Trigger Manually** to kickstart your workflow and you should see your first node being created. + + + + Adding AI Agent Node in n8n + + 1. Click **"+ (What happens next?)"** + 2. Navigate to **AI** → **AI Agents** from the menu + 3. Configure Your AI Agent: + - **Source for Prompt**: choose "Connected Chat Trigger Node" + - **Prompt**: `{{ $json.chatInput }}` (default setting) + 4. Click **Back to Canvas** to save the agent + + You should see **"When Chat Message Received"** and **"AI Agent"** nodes appear on the workflow diagram. + + Next Nodes appearing in n8n workflow + + + + + Including Chat Model + + 1. Click **"+ (Chat Model)"** -> **Anthropic Chat Model** (or your preferred model) + 2. Connect Chat Model to **n8n** using API Key + 3. Select **Model**: Claude Sonnet 4.5 (for example) + 4. Click **Back to Canvas** to lock in your chat model configuration + + Your **Chat Model** node now elegantly connects to your **AI Agent** node. + + n8n workflow now includes Chat Model + + + + + MCP Client in n8n + + 1. Click **"+ (Tool)"** -> **"MCP Client Tool"** + 2. Configure Your MCP Client: + - Endpoint: Paste `https://strata.klavis.ai/mcp/` + - Server Transport: **HTTP Streamable** + - Authentication: **Bearer Auth** + - Credentials for Bearer Auth: Paste **Bearer Token** that you copied earlier + - Tools to Include: **All** (highly recommended for maximum capabilities) + 3. Click **Back to Canvas** to save your MCP Client + + + + + + Bearer Auth connected to n8n workflow + + To connect your bearer account to n8n, simply paste **"Bearer Token"** that you copied from Klavis Dashboard. + 1. Navigate to **Connection** menu + 2. Bearer Token: Paste your **"Bearer Auth Token"** + 3. Allowed HTTP Request Domains: **All** (default settings) + + Now, AI workflow diagram has been completed and should look similar to screenshot below. + + Strata connected to n8n workflow + + + + + + Ready to test your workflow? Launch the chat interface in n8n by pressing **'C'** on your keyboard. + + Using Chat in n8n + + + ```text GitHub + "Create a new issue titled 'Add dark mode' with priority label" + ``` + + ```text Slack + "Send a message to #general: 'Standup meeting in 5 minutes!'" + ``` + + ```text Gmail + "Send email to john@company.com about project update" + ``` + + ```text Notion + "Create a new page called 'Meeting Notes' with today's date" + ``` + + + Once your chat executes successfully, celebrate as **Green Checkmarks** appear across all nodes—including your Model and MCP Client! + + Final n8n workflow after executing the chat + + + + You're all set! Your **Strata** MCP server is now integrated with n8n. + + + +## Troubleshooting + + + + - Double-check your Server URL for typos + - Ensure stable internet connection + - Verify authentication in Klavis dashboard + - Check n8n logs for error messages + - Try completely restarting n8n + + + + - Re-authenticate in the Klavis dashboard + - Check if your OAuth tokens have expired + - Verify API key permissions (for API key services) + - Ensure you've granted all necessary permissions + + + + - Limit the number of active MCP servers (or Use Strata) + - Check your internet connection speed + - Restart n8n periodically + - Contact support if issues persist + + + +## Need Help? + + + + Join our Discord for community support and discussions + + + Contact our technical support team for assistance + + + +--- diff --git a/docs/knowledge-base/use-mcp-server/overview.mdx b/docs/knowledge-base/use-mcp-server/overview.mdx new file mode 100644 index 00000000..f359e8b7 --- /dev/null +++ b/docs/knowledge-base/use-mcp-server/overview.mdx @@ -0,0 +1,18 @@ +--- +title: "Overview" +description: "Learn how to use MCP Servers in your favorite Client" +--- + + + + + + + + + + + + + + diff --git a/docs/knowledge-base/use-mcp-server/vs_code.mdx b/docs/knowledge-base/use-mcp-server/vs_code.mdx new file mode 100644 index 00000000..c493e204 --- /dev/null +++ b/docs/knowledge-base/use-mcp-server/vs_code.mdx @@ -0,0 +1,199 @@ +--- +title: "VS Code" +description: "Connect Strata with VS Code in minutes and supercharge your AI coding experience" +--- +VS Code IDE +VS Code IDE + +## Quick Setup Guide + + + + Navigate to the [Klavis home page](https://www.klavis.ai/home) and click **"MCP Server"** in the left sidebar. You will see a list of MCP servers available in Klavis. + + Klavis home page + Click the **"Authorize"** button next to your chosen server. Once server is authorized, you will see **Green Checkmark** status. + + Authorize MCP Servers + + + You can authorize one or more servers to use with Strata + + + + + + + + + + From Klavis Dashboard, click **Add to VS Code** and you will be redirected to VS Code IDE. + + Copy Strata Server URL + + VS Code will automatically open the **MCP** dialog with Strata Server URL pre-filled. Click **Install** to save the configuration. + + Install Strata in VS Code + + + + + Copy Your Strata URL, open **Command Palette** in VS Code settings: + - **macOS**: `Cmd + Shift + P` + - **Windows/Linux**: `Ctrl + Shift + P` + - Or **Settings > Command Palette > Preferences: Open User Setting (JSON)** + + VS Code IDE Settings + + Paste Your Server URL to **settings.json** file like the screenshot below + + + Open User Setting (JSON) in VS Code + settings.json file in VS Code IDE + + + + + + + You can add **Single** MCP Server URL directly here as well: + + 1. Navigate to **Target Server** and click **Three Dots** next to your server + 2. Tap **Show Individual Server URL** and copy the URL + 3. Paste this URL in VS Code settings as described above + + + + + + + - Automatically redirected to OAuth authorization + - Sign in to your account (GitHub, Google, Slack, etc.) + - Grant necessary permissions + - Redirected back to Klavis automatically + + + + - Prompted to enter an API key + - Follow service-specific instructions to generate key + - Paste key in the provided field + - Click **"Save"** to continue + + + + + In Github Copilot Chat, open **"Configure Tools"** (Tools icon) and scroll down to **MCP Server: klavis-strata**. You will see the list of available tools in Strata. + Strata Tools loading in VS Code + + + As a prerequisite, you should have **GitHub Copilot Chat** extension installed in your VS Code IDE. + + + + + + Using MCP Server in Github Copilot Chat + + Open Chat (`Cmd/Ctrl + I`) and start using natural language: + + + ```text GitHub + "Create a new issue titled 'Add dark mode' with priority label" + ``` + + ```text Slack + "Send a message to #general: 'Standup meeting in 5 minutes!'" + ``` + + ```text Gmail + "Send email to john@company.com about project update" + ``` + + ```text Notion + "Create a new page called 'Meeting Notes' with today's date" + ``` + + + + šŸŽÆ Github Copilot Chat automatically detects when to use **Strata** based on context - no need to specify explicitly! + + + + + You're all set! Your **Strata** is now integrated with Github Copilot in VS Code IDE. + + + +## Troubleshooting + + + + - Double-check your Server URL for typos + - Ensure stable internet connection + - Verify authentication in Klavis dashboard + - Check VS Code logs (View → Output → GitHub Copilot) for errors + - Try completely restarting VS Code IDE + + + + - Re-authenticate in the Klavis dashboard + - Check if your OAuth tokens have expired + - Verify API key permissions (for API key services) + - Ensure you've granted all necessary permissions + + + + - Limit the number of active MCP servers + - Check your internet connection speed + - Restart VS Code IDE periodically + - Contact support if issues persist + + + +## Need Help? + + + + Join our Discord for community support and discussions + + + Contact our technical support team for assistance + + + +--- \ No newline at end of file diff --git a/docs/knowledge-base/use-mcp-server/windsurf.mdx b/docs/knowledge-base/use-mcp-server/windsurf.mdx new file mode 100644 index 00000000..523f6072 --- /dev/null +++ b/docs/knowledge-base/use-mcp-server/windsurf.mdx @@ -0,0 +1,200 @@ +--- +title: "Windsurf" +description: "Connect Strata with Windsurf IDE in minutes and supercharge your AI coding experience" +--- +Windsurf IDE +Windsurf IDE + +## Quick Setup Guide + + + + Navigate to the [Klavis home page](https://www.klavis.ai/home) and click **"MCP Server"** in the left sidebar. You will see a list of MCP servers available in Klavis. + + Klavis home page + Click the **"Authorize"** button next to your chosen server. Once server is authorized, you will see **Green Checkmark** status. + + Authorize MCP Servers + + + You can authorize one or more servers to use with Strata + + + + + + + From Klavis Dashboard, click **"Add to Other Clients"** button to get your Strata Server URL. + + Klavis Dashboard + + **Copy** the strata URL to clipboard - you'll need this in the next step. + + Copy Strata Server URL + + + + - Automatically redirected to OAuth authorization + - Sign in to your account (GitHub, Google, Slack, etc.) + - Grant necessary permissions + - Redirected back to Klavis automatically + + + + - Prompted to enter an API key + - Follow service-specific instructions to generate key + - Paste key in the provided field + - Click **"Save"** to continue + + + + + + + Copy Your Server URL, open Windsurf IDE settings: + - **macOS**: `Cmd + ,` or **Windsurf > Settings > Advanced Settings** + - **Windows/Linux**: `Ctrl + ,` or **Windsurf > Settings > Advanced Settings** + + Windsurf IDE Settings + + Navigate to **Cascade** and click **Manage MCPs** > **View raw config**. + + Windsurf Cascade and MCPs settings + + Get Raw Config file in Windsurf + + Paste Your Server URL to **mcp_config.json** file like the screenshot below. + + Paste strata URL in mcp_config.json file of Windsurf IDE + + + You can add **Single** MCP Server URL directly here as well: + + 1. Navigate to **Target Server** and click **Three Dots** next to your server + 2. Tap **Show Individual Server URL** and copy the URL + 3. Paste this URL in Windsurf settings as described above + + + + + + + + + Strata Configuration in Windsurf + + + šŸ“‹ **Reopen Windsurf Settings** to apply the new configuration + + + + + + Using Strata in Windsurf Chat + + Open Windsurf Chat (`Cmd/Ctrl + Shift + L`) and start using natural language: + + + ```text GitHub + "Create a new issue titled 'Add dark mode' with priority label" + ``` + + ```text Slack + "Send a message to #general: 'Standup meeting in 5 minutes!'" + ``` + + ```text Gmail + "Send email to john@company.com about project update" + ``` + + ```text Notion + "Create a new page called 'Meeting Notes' with today's date" + ``` + + + + šŸŽÆ Windsurf Chat automatically detects when to use **Strata** based on context - no need to specify explicitly! + + + + + You're all set! Your **Strata** MCP server is now integrated with Windsurf IDE. + + + +## Troubleshooting + + + + - Double-check your Server URL for typos + - Ensure stable internet connection + - Verify authentication in Klavis dashboard + - Check Windsurf IDE logs for error messages + - Try completely restarting Windsurf IDE + + + + - Re-authenticate in the Klavis dashboard + - Check if your OAuth tokens have expired + - Verify API key permissions (for API key services) + - Ensure you've granted all necessary permissions + + + + - Limit the number of active MCP servers + - Check your internet connection speed + - Restart Windsurf IDE periodically + - Contact support if issues persist + + + +## Need Help? + + + + Join our Discord for community support and discussions + + + Contact our technical support team for assistance + + + +--- \ No newline at end of file diff --git a/docs/legacy/instance.mdx b/docs/legacy/instance.mdx new file mode 100644 index 00000000..e310b6e3 --- /dev/null +++ b/docs/legacy/instance.mdx @@ -0,0 +1,354 @@ +--- +title: MCP Server Instance +description: 1:1 Mapping API to MCP Server Tool +--- + + +This is the legacy approach to MCP servers. We recommend using [Strata MCP Server](/quickstart#strata) for better tool management and context window optimization. + + + + + + + Go to your Dashboard. + + + Choose an integration (for example, Gmail), and get the "individual server url" + + Individual Server URL + Individual Server URL + + + Complete Auth by Click "Anthorize" button. + + + Add to your favorite MCP-supported clients, such as Cursor, Claude Code, VS Code, ChatGPT, etc. + + + + + + + + ```bash pip + pip install klavis + ``` + + ```bash npm + npm install klavis + ``` + + + + + ```bash Curl + curl -X POST "https://api.klavis.ai/mcp-server/instance/create" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "serverName": "Gmail", + "userId": "user123" + }' + ``` + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_KLAVIS_API_KEY") + + server = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.GMAIL, + user_id="user123", + ) + print(server.server_url) + ``` + + ```javascript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavisClient = new KlavisClient({ apiKey: 'YOUR_KLAVIS_API_KEY' }); + const server = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Gmail, + userId: 'user123', + }); + console.log(server.serverUrl); + ``` + + + **Response Information**: The API returns: + - `serverUrl`: The URL you'll use to connect your MCP client to the individual MCP Server + - `oauthUrl`: Authorization link if the service requires OAuth authentication + + + Full Individual MCP Server endpoints + + + + + ```bash Curl + Copy and paste the OAuth URL into your web browser + ``` + ```python Python + import webbrowser + if getattr(server, 'oauth_url', None): + webbrowser.open(server.oauth_url) + ``` + + ```javascript TypeScript + if (server.oauthUrl) { + window?.open?.(server.oauthUrl); + } + ``` + + + **Authentication Methods**: + - **API Key**: See [API Key authentication guide](/auth/api-key) for details. + - **OAuth**: See [OAuth authentication guide](/auth/oauth) for details. + + + + šŸŽ‰ **Your MCP Server URL is ready to use!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + ```python Python + import os + import asyncio + import webbrowser + + from klavis import Klavis + from klavis.types import McpServerName + from langchain_openai import ChatOpenAI + from langchain_mcp_adapters.client import MultiServerMCPClient + from langgraph.prebuilt import create_react_agent + + from dotenv import load_dotenv + load_dotenv() + + async def main() -> None: + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + # Step 1: Create a single MCP server (e.g., Gmail) + response = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.GMAIL, + user_id="demo_user", + ) + + # Step 2: Handle OAuth authorization if needed + if hasattr(response, 'oauth_url') and response.oauth_url: + webbrowser.open(response.oauth_url) + input("Press Enter after completing OAuth authorization...") + + # Step 3: Create LangChain Agent with MCP Tools + mcp_client = MultiServerMCPClient({ + "gmail": { + "transport": "streamable_http", + "url": response.server_url, + } + }) + + # Get all available tools from the server + tools = await mcp_client.get_tools() + # Setup LLM + llm = ChatOpenAI(model="gpt-4o-mini", api_key=os.getenv("OPENAI_API_KEY")) + + # Step 4: Create LangChain agent with MCP tools + agent = create_react_agent( + model=llm, + tools=tools, + prompt=( + "You are a helpful assistant that can use MCP tools. " + ), + ) + + # Step 5: Invoke the agent + result = await agent.ainvoke({ + "messages": [{"role": "user", "content": "Search my inbox for unread emails and summarize."}], + }) + + # Print only the final AI response content + print(result["messages"][-1].content) + + if __name__ == "__main__": + asyncio.run(main()) + ``` + + + + + ```python Python + import os + import asyncio + import webbrowser + + from klavis import Klavis + from klavis.types import McpServerName + from llama_index.llms.openai import OpenAI + from llama_index.core.agent.workflow import FunctionAgent + from llama_index.tools.mcp import BasicMCPClient + from llama_index.tools.mcp import ( + aget_tools_from_mcp_url, + ) + + from dotenv import load_dotenv + load_dotenv() + + async def main() -> None: + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + # Step 1: Create a single MCP server (e.g., Gmail) + response = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.GMAIL, + user_id="demo_user", + ) + + # Step 2: Handle OAuth authorization if needed + if hasattr(response, 'oauth_url') and response.oauth_url: + webbrowser.open(response.oauth_url) + input("Press Enter after completing OAuth authorization...") + + # Step 3: Create LlamaIndex Agent with MCP Tools + tools = await aget_tools_from_mcp_url( + response.server_url, + client=BasicMCPClient(response.server_url) + ) + + # Setup LLM + llm = OpenAI(model="gpt-4o-mini", api_key=os.getenv("OPENAI_API_KEY")) + + # Step 4: Create LlamaIndex agent with MCP tools + agent = FunctionAgent( + name="gmail_agent", + description="Agent using Gmail MCP tools", + tools=tools, + llm=llm, + system_prompt=( + "You are a helpful assistant that can use MCP tools. " + ), + ) + + # Step 5: Invoke the agent + result = await agent.run( + "Search my inbox for unread emails and summarize." + ) + + # Print the response + print(result) + + if _name_ == "_main_": + asyncio.run(main()) + + ``` + + + + Coming soon + + + + ```python Python + import os + import asyncio + import webbrowser + + from dotenv import load_dotenv + from klavis import Klavis + from klavis.types import McpServerName + from autogen_agentchat.agents import AssistantAgent + from autogen_agentchat.ui import Console + from autogen_core import CancellationToken + from autogen_ext.models.openai import OpenAIChatCompletionClient + from autogen_ext.tools.mcp import StreamableHttpMcpToolAdapter, StreamableHttpServerParams + from autogen_ext.tools.mcp import mcp_server_tools + + + load_dotenv() + + async def main() -> None: + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + # Create MCP server instance + response = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.GMAIL, + user_id="demo_user", + ) + + # Handle OAuth authorization if required + if getattr(response, "oauth_url", None): + webbrowser.open(response.oauth_url) + input("Press Enter after completing OAuth authorization...") + + server_params = StreamableHttpServerParams( + url=response.server_url, + timeout=30.0, + sse_read_timeout=300.0, + terminate_on_close=True, + ) + + adapters = await mcp_server_tools(server_params) + + model_client = OpenAIChatCompletionClient(model="gpt-4") + agent = AssistantAgent( + name="MailAI", + model_client=model_client, + tools=adapters, + system_message="You are a helpful Gmail assistant.", + ) + + await Console( + agent.run_stream( + task="Find My Latest Emails", + cancellation_token=CancellationToken() + ) + ) + + if __name__ == "__main__": + asyncio.run(main()) + ``` + + + + + + + + + Visit https://github.com/Klavis-AI/klavis/mcp_servers to view the source code and find more information + + + + + ```bash Docker + docker run -p 5000:5000 ghcr.io/klavis-ai/gmail-mcp-server:latest + ``` + + + Browse all available MCP server Docker images at [GitHub Packages](https://github.com/orgs/Klavis-AI/packages?repo_name=klavis) + + + + Use the local URL (for example, http://localhost:5000) in your client or aggregator. + + + Add TLS, auth, and deploy behind your gateway as needed. + + + + + + +For the recommended approach with better tool management and progressive discovery, see [Strata MCP Server](/quickstart#strata). + diff --git a/docs/mcp-server/affinity.mdx b/docs/mcp-server/affinity.mdx new file mode 100644 index 00000000..e17bbe92 --- /dev/null +++ b/docs/mcp-server/affinity.mdx @@ -0,0 +1,220 @@ +--- +title: 'Affinity' +description: 'Connect AI agents to Affinity CRM for managing contacts, deals, and relationships' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Affinity CRM to manage relationships, track deals, and organize contacts through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Affinity + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.AFFINITY], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Affinity + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Affinity], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Affinity"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.AFFINITY, + auth_data={ + "api_key": "YOUR_AFFINITY_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Affinity, + authData: { + api_key: "YOUR_AFFINITY_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Affinity", + "authData": { + "api_key": "YOUR_AFFINITY_API_KEY" + } + }' + ``` + + + + Get your Affinity API key from your [Affinity Settings](https://app.affinity.co/settings/api). + + + + šŸŽ‰ **Your Affinity MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Affinity from the list of available integrations. + + + + Complete the OAuth flow to connect your Affinity account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/affinity + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/affinity-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/affinity-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_affinity_api_key"}' \ + ghcr.io/klavis-ai/affinity-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "affinity": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Affinity. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/airtable.mdx b/docs/mcp-server/airtable.mdx new file mode 100644 index 00000000..50558e78 --- /dev/null +++ b/docs/mcp-server/airtable.mdx @@ -0,0 +1,202 @@ +--- +title: 'Airtable' +description: 'Connect AI agents to Airtable for managing databases, records, and workflows' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Airtable to manage databases, automate workflows, and sync data through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.AIRTABLE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Airtable + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Airtable], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Airtable"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.AIRTABLE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Airtable]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/airtable/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Airtable MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Airtable from the list of available integrations. + + + + Complete the OAuth flow to connect your Airtable account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/airtable + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/airtable-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/airtable-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_airtable_api_key"}' \ + ghcr.io/klavis-ai/airtable-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "airtable": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Airtable. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/asana.mdx b/docs/mcp-server/asana.mdx new file mode 100644 index 00000000..ae743b9b --- /dev/null +++ b/docs/mcp-server/asana.mdx @@ -0,0 +1,202 @@ +--- +title: 'Asana' +description: 'Connect AI agents to Asana for managing projects, tasks, and team collaboration' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Asana to manage projects, track tasks, and collaborate with your team through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.ASANA], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Asana + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Asana], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Asana"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.ASANA]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Asana]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/asana/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Asana MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Asana from the list of available integrations. + + + + Complete the OAuth flow to connect your Asana account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/asana + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/asana-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/asana-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_asana_api_key"}' \ + ghcr.io/klavis-ai/asana-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "asana": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Asana. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/attio.mdx b/docs/mcp-server/attio.mdx new file mode 100644 index 00000000..17023b02 --- /dev/null +++ b/docs/mcp-server/attio.mdx @@ -0,0 +1,202 @@ +--- +title: 'Attio' +description: 'Connect AI agents to Attio for CRM management and customer relationship tracking' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Attio to manage your CRM data, track customer relationships, and automate sales processes through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.ATTIO], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Attio + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Attio], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Attio"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.ATTIO]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Attio]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/attio/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Attio MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Attio from the list of available integrations. + + + + Complete the OAuth flow to connect your Attio workspace. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/attio + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/attio-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/attio-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_attio_token"}' \ + ghcr.io/klavis-ai/attio-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "attio": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Attio. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/box.mdx b/docs/mcp-server/box.mdx new file mode 100644 index 00000000..ed939da2 --- /dev/null +++ b/docs/mcp-server/box.mdx @@ -0,0 +1,206 @@ +--- +title: 'Box' +description: 'Connect AI agents to Box for managing files and collaboration' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Box to manage files and collaboration through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Box + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.BOX], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Box + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Box], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Box"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.BOX]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Box]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/box/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Box API key from your [Box Developer Console](https://developer.box.com/). + + + + šŸŽ‰ **Your Box MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Box from the list of available integrations. + + + + Complete the OAuth flow to connect your Box account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/box + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/box-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/box-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_box_api_key"}' \ + ghcr.io/klavis-ai/box-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "box": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Box. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/brave_search.mdx b/docs/mcp-server/brave_search.mdx new file mode 100644 index 00000000..16d3b8ad --- /dev/null +++ b/docs/mcp-server/brave_search.mdx @@ -0,0 +1,211 @@ +--- +title: 'Brave Search' +description: 'Connect AI agents to Brave Search for web, image, news, and video search capabilities' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Brave Search to perform comprehensive web searches, including web results, images, news, and videos through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Brave Search + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.BRAVE_SEARCH], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Brave Search + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.BraveSearch], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Brave Search"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.BRAVE_SEARCH, + auth_data={ + "api_key": "YOUR_BRAVE_SEARCH_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.BraveSearch, + authData: { + api_key: "YOUR_BRAVE_SEARCH_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Brave Search", + "authData": { + "api_key": "YOUR_BRAVE_SEARCH_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your Brave Search MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Brave Search from the list of available integrations. + + + + Enter your Brave Search API key to enable search functionality. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/brave_search + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/brave-search-mcp-server:latest + + # Run with Brave Search API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"api_key":"your_brave_search_api_key"}' \ + ghcr.io/klavis-ai/brave-search-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "brave_search": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Brave Search. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/cal_com.mdx b/docs/mcp-server/cal_com.mdx new file mode 100644 index 00000000..1d74bd28 --- /dev/null +++ b/docs/mcp-server/cal_com.mdx @@ -0,0 +1,202 @@ +--- +title: 'Cal.com' +description: 'Connect AI agents to Cal.com for scheduling and calendar management' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Cal.com to manage schedules, create appointments, and automate calendar operations through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.CAL_COM], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Cal.com + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.CalCom], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["CalCom"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.CAL_COM]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.CalCom]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/cal_com/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Cal.com MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Cal.com from the list of available integrations. + + + + Complete the OAuth flow to connect your Cal.com account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/cal_com + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/cal-com-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/cal-com-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_cal_com_token"}' \ + ghcr.io/klavis-ai/cal-com-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "cal_com": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Cal.com. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/calendly.mdx b/docs/mcp-server/calendly.mdx new file mode 100644 index 00000000..8a844e11 --- /dev/null +++ b/docs/mcp-server/calendly.mdx @@ -0,0 +1,203 @@ +--- +title: 'Calendly' +description: 'Connect AI agents to Calendly for automated scheduling and calendar management' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Calendly to automate scheduling, manage appointments, and integrate calendar functionality through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.CALENDLY], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Calendly + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Calendly], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Calendly"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.CALENDLY]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Calendly]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/calendly/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Calendly MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Calendly from the list of available integrations. + + + + Complete the OAuth flow to connect your Calendly account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/calendly + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/calendly-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/calendly-mcp-server:latest + + # Or run with manual OAuth token + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_calendly_oauth_token"}' \ + ghcr.io/klavis-ai/calendly-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "calendly": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Calendly. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/clickup.mdx b/docs/mcp-server/clickup.mdx new file mode 100644 index 00000000..cc0390d9 --- /dev/null +++ b/docs/mcp-server/clickup.mdx @@ -0,0 +1,203 @@ +--- +title: 'ClickUp' +description: 'Connect AI agents to ClickUp for project management and task tracking' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to ClickUp to manage projects, track tasks, and automate workflow operations through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.CLICKUP], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with ClickUp + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.ClickUp], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["ClickUp"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.CLICKUP]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.ClickUp]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/clickup/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your ClickUp MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select ClickUp from the list of available integrations. + + + + Complete the OAuth flow to connect your ClickUp workspace. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/clickup + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/clickup-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/clickup-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_clickup_token"}' \ + ghcr.io/klavis-ai/clickup-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "clickup": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for ClickUp. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/close.mdx b/docs/mcp-server/close.mdx new file mode 100644 index 00000000..ce822ee1 --- /dev/null +++ b/docs/mcp-server/close.mdx @@ -0,0 +1,198 @@ +--- +title: 'Close' +description: 'Connect AI agents to Close CRM for sales pipeline management and lead automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Close CRM to manage leads, contacts, opportunities, and automate sales workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.CLOSE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Close + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Close], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Close"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.CLOSE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Close]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/close/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Close CRM MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Close from the list of available integrations. + + + + Complete the OAuth flow to connect your Close CRM account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/close + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/close-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/close-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "close": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Close. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/cloudflare.mdx b/docs/mcp-server/cloudflare.mdx new file mode 100644 index 00000000..d0bb2d91 --- /dev/null +++ b/docs/mcp-server/cloudflare.mdx @@ -0,0 +1,212 @@ +--- +title: 'Cloudflare' +description: 'Connect AI agents to Cloudflare for CDN, security, and edge computing management' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Cloudflare to manage DNS records, deploy Workers, configure firewalls, and optimize web performance through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Cloudflare + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.CLOUDFLARE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Cloudflare + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Cloudflare], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Cloudflare"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.CLOUDFLARE, + auth_data={ + "api_key": "YOUR_CLOUDFLARE_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Cloudflare, + authData: { + api_key: "YOUR_CLOUDFLARE_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Cloudflare", + "authData": { + "api_key": "YOUR_CLOUDFLARE_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your Cloudflare MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Cloudflare from the list of available integrations. + + + + Enter your Cloudflare API key to authenticate the connection. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/cloudflare + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/cloudflare-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/cloudflare-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "cloudflare": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Cloudflare. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/coinbase.mdx b/docs/mcp-server/coinbase.mdx new file mode 100644 index 00000000..3c853c6e --- /dev/null +++ b/docs/mcp-server/coinbase.mdx @@ -0,0 +1,215 @@ +--- +title: 'Coinbase' +description: 'Connect AI agents to Coinbase for cryptocurrency data and portfolio management' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Coinbase to access cryptocurrency prices, manage portfolios, and track transactions through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Coinbase + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.COINBASE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Coinbase + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Coinbase], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Coinbase"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.COINBASE, + auth_data={ + "api_key": "YOUR_COINBASE_API_KEY", + "secret_key": "YOUR_COINBASE_API_SECRET" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Coinbase, + authData: { + api_key: "YOUR_COINBASE_API_KEY", + secret_key: "YOUR_COINBASE_API_SECRET" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Coinbase", + "authData": { + "api_key": "YOUR_COINBASE_API_KEY", + "secret_key": "YOUR_COINBASE_API_SECRET" + } + }' + ``` + + + + šŸŽ‰ **Your Coinbase MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Coinbase from the list of available integrations. + + + + Enter your Coinbase API key and secret to enable cryptocurrency data access. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/coinbase + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/coinbase-mcp-server:latest + + # Run with Coinbase API credentials + docker run -p 5000:5000 \ + -e AUTH_DATA='{"api_key":"your_coinbase_api_key","secret_key":"your_coinbase_secret"}' \ + ghcr.io/klavis-ai/coinbase-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "coinbase": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Coinbase. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + diff --git a/docs/mcp-server/confluence.mdx b/docs/mcp-server/confluence.mdx new file mode 100644 index 00000000..1631f594 --- /dev/null +++ b/docs/mcp-server/confluence.mdx @@ -0,0 +1,203 @@ +--- +title: 'Confluence' +description: 'Connect AI agents to Confluence for document management and collaborative editing' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Confluence to manage documents, create and edit pages, organize spaces, and collaborate on content through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.CONFLUENCE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Confluence + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Confluence], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Confluence"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.CONFLUENCE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Confluence]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/confluence/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Confluence MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Confluence from the list of available integrations. + + + + Complete the OAuth flow to connect your Confluence workspace. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/confluence + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/confluence-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/confluence-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_confluence_token"}' \ + ghcr.io/klavis-ai/confluence-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "confluence": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Confluence. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/dialpad.mdx b/docs/mcp-server/dialpad.mdx new file mode 100644 index 00000000..eb724ff3 --- /dev/null +++ b/docs/mcp-server/dialpad.mdx @@ -0,0 +1,206 @@ +--- +title: 'Dialpad' +description: 'Connect to Dialpad for AI-Powered Customer Communication Platform' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Dialpad for AI-Powered Customer Communication Platform + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Dialpad + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.DIALPAD], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Dialpad + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.DIALPAD], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["DIALPAD"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.DIALPAD]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.DIALPAD]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/dialpad/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Dialpad API key from your [Dialpad Developer Console](https://developers.dialpad.com/). + + + + šŸŽ‰ **Your Dialpad MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Dialpad from the list of available integrations. + + + + Complete the OAuth flow to connect your Dialpad account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/dialpad + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/dialpad-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/dialpad-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_dialpad_api_key"}' \ + ghcr.io/klavis-ai/dialpad-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "dialpad": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Dialpad. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/discord.mdx b/docs/mcp-server/discord.mdx new file mode 100644 index 00000000..507593b7 --- /dev/null +++ b/docs/mcp-server/discord.mdx @@ -0,0 +1,227 @@ +--- +title: 'Discord' +description: 'Connect AI agents to Discord for server management and communication' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Discord to manage servers, send messages, and interact with communities through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Discord + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.DISCORD], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Discord + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Discord], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Discord"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.DISCORD, + auth_data={ + "data": { + "bot_token": "YOUR_DISCORD_BOT_TOKEN" + } + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Discord, + authData: { + data: { + bot_token: "YOUR_DISCORD_BOT_TOKEN" + } + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Discord", + "authData": { + "data": { + "bot_token": "YOUR_DISCORD_BOT_TOKEN" + } + } + }' + ``` + + + + Get your Discord bot token from the [Discord Developer Portal](https://discord.com/developers/applications). You'll also need to add the bot to your Discord server with the required permissions. + + + + šŸŽ‰ **Your Discord MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Discord from the list of available integrations. + + + + Complete the OAuth flow to connect your Discord bot. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/discord + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/discord-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/discord-mcp-server:latest + + # Or run with manual bot token + docker run -p 5000:5000 \ + -e AUTH_DATA='{"bot_token":"your_discord_bot_token"}' \ + ghcr.io/klavis-ai/discord-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "discord": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Discord. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/doc2markdown.mdx b/docs/mcp-server/doc2markdown.mdx new file mode 100644 index 00000000..37a6b60a --- /dev/null +++ b/docs/mcp-server/doc2markdown.mdx @@ -0,0 +1,175 @@ +--- +title: 'Doc2markdown' +description: 'Connect AI agents to Doc2markdown for document format conversion' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Doc2markdown to convert various document formats (PDF, PowerPoint, Word, Excel, HTML, ZIP files, and EPubs) to markdown through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Doc2markdown + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.DOC2MARKDOWN], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Doc2markdown + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Doc2markdown], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Doc2markdown"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + šŸŽ‰ **Your Doc2markdown MCP Server is ready!** You can now use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Doc2markdown from the list of available integrations. + + + + Enter your Doc2markdown API key to authenticate the connection. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/doc2markdown + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/doc2markdown-mcp-server:latest + + # Run with API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"api_key":"your_doc2markdown_api_key"}' \ + ghcr.io/klavis-ai/doc2markdown-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "doc2markdown": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Doc2markdown. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/docusign.mdx b/docs/mcp-server/docusign.mdx new file mode 100644 index 00000000..865e1fab --- /dev/null +++ b/docs/mcp-server/docusign.mdx @@ -0,0 +1,206 @@ +--- +title: 'Docusign' +description: 'Connect AI agents to Docusign for Electronic Signatures and Agreements' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect AI agents to Docusign for Electronic Signatures and Agreements + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Docusign + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.DOCUSIGN], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Docusign + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.DOCUSIGN], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["DOCUSIGN"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.DOCUSIGN]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.DOCUSIGN]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/docusign/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Docusign API key from your [Docusign Developer Console](https://developers.docusign.com/). + + + + šŸŽ‰ **Your Docusign MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Docusign from the list of available integrations. + + + + Complete the OAuth flow to connect your Docusign account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/docusign + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/docusign-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/docusign-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_docusign_api_key"}' \ + ghcr.io/klavis-ai/docusign-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "docusign": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Docusign. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/dropbox.mdx b/docs/mcp-server/dropbox.mdx new file mode 100644 index 00000000..6834e210 --- /dev/null +++ b/docs/mcp-server/dropbox.mdx @@ -0,0 +1,203 @@ +--- +title: 'Dropbox' +description: 'Connect AI agents to Dropbox for file storage and collaboration management' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Dropbox to manage files, organize folders, share content, and automate file operations through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.DROPBOX], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Dropbox + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Dropbox], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Dropbox"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.DROPBOX]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Dropbox]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/dropbox/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Dropbox MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Dropbox from the list of available integrations. + + + + Complete the OAuth flow to connect your Dropbox account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/dropbox + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/dropbox-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/dropbox-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_dropbox_token"}' \ + ghcr.io/klavis-ai/dropbox-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "dropbox": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Dropbox. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/elevenlabs.mdx b/docs/mcp-server/elevenlabs.mdx new file mode 100644 index 00000000..41071119 --- /dev/null +++ b/docs/mcp-server/elevenlabs.mdx @@ -0,0 +1,206 @@ +--- +title: 'ElevenLabs' +description: 'Connect AI agents to ElevenLabs for managing voice and audio' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to ElevenLabs to manage voice and audio through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with ElevenLabs + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.ELEVENLABS], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with ElevenLabs + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.ELEVENLABS], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["ElevenLabs"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.ELEVENLABS]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.ELEVENLABS]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/elevenlabs/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your ElevenLabs API key from your [ElevenLabs Developer Console](https://elevenlabs.io/developers). + + + + šŸŽ‰ **Your ElevenLabs MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select ElevenLabs from the list of available integrations. + + + + Complete the OAuth flow to connect your ElevenLabs account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/elevenlabs + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/elevenlabs-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/elevenlabs-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_elevenlabs_api_key"}' \ + ghcr.io/klavis-ai/elevenlabs-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "elevenlabs": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for ElevenLabs. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/exa.mdx b/docs/mcp-server/exa.mdx new file mode 100644 index 00000000..b4c94ee0 --- /dev/null +++ b/docs/mcp-server/exa.mdx @@ -0,0 +1,216 @@ +--- +title: 'Exa' +description: 'Connect AI agents to Exa for AI-powered semantic search and content discovery' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Exa to perform AI-powered semantic search, content retrieval, and comprehensive research through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Exa + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.EXA], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Exa + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Exa], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Exa"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.EXA, + auth_data={ + "api_key": "YOUR_EXA_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Exa, + authData: { + api_key: "YOUR_EXA_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Exa", + "authData": { + "api_key": "YOUR_EXA_API_KEY" + } + }' + ``` + + + + Get your Exa API key from the [Exa AI website](https://docs.exa.ai/reference/getting-started). + + + + šŸŽ‰ **Your Exa MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Exa from the list of available integrations. + + + + Enter your Exa API key to enable semantic search functionality. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/exa + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/exa-mcp-server:latest + + # Run with Exa API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"api_key":"your_exa_api_key"}' \ + ghcr.io/klavis-ai/exa-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "exa": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Exa. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + diff --git a/docs/mcp-server/fathom.mdx b/docs/mcp-server/fathom.mdx new file mode 100644 index 00000000..42f0f94f --- /dev/null +++ b/docs/mcp-server/fathom.mdx @@ -0,0 +1,203 @@ +--- +title: 'Fathom' +description: 'Connect to Fathom for AI-Powered Notes and Summaries' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Fathom for AI-Powered Notes and Summaries + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Fathom + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.FATHOM], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Fathom + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.FATHOM], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Fathom"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.FATHOM]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.FATHOM]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/fathom/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + + šŸŽ‰ **Your Fathom integration is ready!** + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Fathom from the list of available integrations. + + + + Complete the OAuth flow to connect your Fathom account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/fathom + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/fathom-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/fathom-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_fathom_api_key"}' \ + ghcr.io/klavis-ai/fathom-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "fathom": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Fathom. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/figma.mdx b/docs/mcp-server/figma.mdx new file mode 100644 index 00000000..ae9df7c7 --- /dev/null +++ b/docs/mcp-server/figma.mdx @@ -0,0 +1,203 @@ +--- +title: 'Figma' +description: 'Connect AI agents to Figma for design collaboration and asset management' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Figma to access design files, manage assets, retrieve comments, and collaborate on design projects through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.FIGMA], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Figma + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Figma], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Figma"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.FIGMA]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Figma]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/figma/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Figma MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Figma from the list of available integrations. + + + + Complete the OAuth flow to connect your Figma account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/figma + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/figma-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/figma-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"api_key":"your_figma_api_key"}' \ + ghcr.io/klavis-ai/figma-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "figma": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Figma. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/firecrawl-deep-research.mdx b/docs/mcp-server/firecrawl-deep-research.mdx new file mode 100644 index 00000000..83b2d3bc --- /dev/null +++ b/docs/mcp-server/firecrawl-deep-research.mdx @@ -0,0 +1,216 @@ +--- +title: 'Firecrawl Deep Research' +description: 'Connect AI agents to Firecrawl Deep Research for comprehensive web-based research and analysis' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Firecrawl Deep Research to perform comprehensive web-based research, analysis, and in-depth topic exploration through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Firecrawl Deep Research + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.FIRECRAWL_DEEP_RESEARCH], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Firecrawl Deep Research + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.FirecrawlDeepResearch], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Firecrawl Deep Research"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + You can get your Firecrawl API key from the [Firecrawl website](https://firecrawl.dev/). + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.FIRECRAWL_DEEP_RESEARCH, + auth_data={ + "api_key": "YOUR_FIRECRAWL_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.FirecrawlDeepResearch, + authData: { + api_key: "YOUR_FIRECRAWL_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Firecrawl Deep Research", + "authData": { + "api_key": "YOUR_FIRECRAWL_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your Firecrawl Deep Research MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Firecrawl Deep Research from the list of available integrations. + + + + Enter your Firecrawl API key to enable research functionality. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/firecrawl_deep_research + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/firecrawl-deep-research-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/firecrawl-deep-research-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "firecrawl_deep": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Firecrawl Deep Research. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/firecrawl-web-search.mdx b/docs/mcp-server/firecrawl-web-search.mdx new file mode 100644 index 00000000..7f34a180 --- /dev/null +++ b/docs/mcp-server/firecrawl-web-search.mdx @@ -0,0 +1,216 @@ +--- +title: 'Firecrawl Web Search' +description: 'Connect AI agents to Firecrawl Web Search for advanced web scraping and content extraction' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Firecrawl Web Search to perform advanced web scraping, content extraction, and automated web crawling through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Firecrawl Web Search + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.FIRECRAWL_WEB_SEARCH], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Firecrawl Web Search + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.FirecrawlWebSearch], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Firecrawl Web Search"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + You can get your Firecrawl API key from the [Firecrawl website](https://firecrawl.dev/). + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.FIRECRAWL_WEB_SEARCH, + auth_data={ + "api_key": "YOUR_FIRECRAWL_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.FirecrawlWebSearch, + authData: { + api_key: "YOUR_FIRECRAWL_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Firecrawl Web Search", + "authData": { + "api_key": "YOUR_FIRECRAWL_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your Firecrawl Web Search MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Firecrawl Web Search from the list of available integrations. + + + + Enter your Firecrawl API key to enable web scraping functionality. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/firecrawl + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/firecrawl-web-search-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/firecrawl-web-search-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "firecrawl": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Firecrawl Web Search. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/fireflies.mdx b/docs/mcp-server/fireflies.mdx new file mode 100644 index 00000000..285b2bcb --- /dev/null +++ b/docs/mcp-server/fireflies.mdx @@ -0,0 +1,212 @@ +--- +title: 'Fireflies' +description: 'Connect AI agents to Fireflies for automated meeting transcription, note-taking, and conversation analysis' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Fireflies to transcribe meetings, extract insights, search conversations, and automate meeting workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Fireflies + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.FIREFLIES], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Fireflies + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Fireflies], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Fireflies"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.FIREFLIES, + auth_data={ + "api_key": "YOUR_FIREFLIES_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Fireflies, + authData: { + api_key: "YOUR_FIREFLIES_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Fireflies", + "authData": { + "api_key": "YOUR_FIREFLIES_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your Fireflies MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Fireflies from the list of available integrations. + + + + Enter your Fireflies API key to connect. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/fireflies + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/fireflies-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/fireflies-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "fireflies": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Fireflies. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + diff --git a/docs/mcp-server/freshdesk.mdx b/docs/mcp-server/freshdesk.mdx new file mode 100644 index 00000000..fd16d290 --- /dev/null +++ b/docs/mcp-server/freshdesk.mdx @@ -0,0 +1,206 @@ +--- +title: 'Freshdesk' +description: 'Connect AI agents to Freshdesk for managing customer support' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Freshdesk to manage customer support through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Freshdesk + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.FRESHDESK], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Freshdesk + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Freshdesk], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Freshdesk"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.FRESHDESK]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Freshdesk]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/freshdesk/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Freshdesk API key from your [Freshdesk Developer Console](https://developers.freshdesk.com/). + + + + šŸŽ‰ **Your Freshdesk MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Freshdesk from the list of available integrations. + + + + Complete the OAuth flow to connect your Freshdesk account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/freshdesk + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/freshdesk-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/freshdesk-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_freshdesk_api_key"}' \ + ghcr.io/klavis-ai/freshdesk-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "freshdesk": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Freshdesk. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/github.mdx b/docs/mcp-server/github.mdx new file mode 100644 index 00000000..64489cbb --- /dev/null +++ b/docs/mcp-server/github.mdx @@ -0,0 +1,203 @@ +--- +title: 'GitHub' +description: 'Connect AI agents to GitHub for repository management and code collaboration' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to GitHub to manage repositories, track issues, and collaborate on code through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GITHUB], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with GitHub + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Github], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["GitHub"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.GITHUB]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Github]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/github/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your GitHub MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select GitHub from the list of available integrations. + + + + Complete the OAuth flow to connect your GitHub account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/github + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/github-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/github-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_github_token"}' \ + ghcr.io/klavis-ai/github-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "github": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for GitHub. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/gitlab.mdx b/docs/mcp-server/gitlab.mdx new file mode 100644 index 00000000..c8b48a6d --- /dev/null +++ b/docs/mcp-server/gitlab.mdx @@ -0,0 +1,206 @@ +--- +title: 'GitLab' +description: 'Connect AI agents to GitLab for source control, CI/CD, and DevOps automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to GitLab to manage source control, CI/CD, and DevOps automation through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Gitlab + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GITLAB], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Gitlab + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Gitlab], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Gitlab"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.GITLAB]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Gitlab]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/gitlab/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Gitlab API key from your [Gitlab Developer Console](https://developer.gitlab.com/). + + + + šŸŽ‰ **Your Gitlab MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Gitlab from the list of available integrations. + + + + Complete the OAuth flow to connect your Gitlab account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/gitlab + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/gitlab-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/gitlab-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_gitlab_api_key"}' \ + ghcr.io/klavis-ai/gitlab-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "gitlab": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for GitLab. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/gmail.mdx b/docs/mcp-server/gmail.mdx new file mode 100644 index 00000000..53ba0c87 --- /dev/null +++ b/docs/mcp-server/gmail.mdx @@ -0,0 +1,203 @@ +--- +title: 'Gmail' +description: 'Connect AI agents to Gmail for email management and communication' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Gmail to manage emails, organize conversations, and automate communication through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GMAIL], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Gmail + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Gmail], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Gmail"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.GMAIL]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Gmail]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/gmail/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Gmail MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Gmail from the list of available integrations. + + + + Complete the OAuth flow to connect your Gmail account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/gmail + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/gmail-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/gmail-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_gmail_token"}' \ + ghcr.io/klavis-ai/gmail-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "gmail": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Gmail. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/gong.mdx b/docs/mcp-server/gong.mdx new file mode 100644 index 00000000..a642187c --- /dev/null +++ b/docs/mcp-server/gong.mdx @@ -0,0 +1,216 @@ +--- +title: 'Gong' +description: 'Connect AI agents to Gong for sales conversation intelligence and call analytics' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Gong to access sales conversation intelligence, analyze call transcripts, and extract insights from sales meetings through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Gong + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GONG], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Gong + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Gong], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Gong"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + You can get your Gong API key from your Gong admin settings. Contact your Gong administrator for access. + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.GONG, + auth_data={ + "api_key": "YOUR_GONG_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Gong, + authData: { + api_key: "YOUR_GONG_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Gong", + "authData": { + "api_key": "YOUR_GONG_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your Gong MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Gong from the list of available integrations. + + + + Enter your Gong API key to enable conversation intelligence access. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/gong + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/gong-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/gong-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "gong": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Gong. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/google_calendar.mdx b/docs/mcp-server/google_calendar.mdx new file mode 100644 index 00000000..e2dc2bfc --- /dev/null +++ b/docs/mcp-server/google_calendar.mdx @@ -0,0 +1,198 @@ +--- +title: 'Google Calendar' +description: 'Connect AI agents to Google Calendar for event management and scheduling automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Google Calendar to manage events, schedule meetings, and automate calendar operations through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GOOGLE_CALENDAR], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Google Calendar + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.GoogleCalendar], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "serverName": "Google Calendar", + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.GOOGLE_CALENDAR]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.GoogleCalendar]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/gcalendar/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Google Calendar MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Google Calendar from the list of available integrations. + + + + Complete the OAuth flow to connect your Google Calendar account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/google_calendar + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/google-calendar-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/google-calendar-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "google_calendar": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Google Calendar. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/google_docs.mdx b/docs/mcp-server/google_docs.mdx new file mode 100644 index 00000000..17bcdf5c --- /dev/null +++ b/docs/mcp-server/google_docs.mdx @@ -0,0 +1,198 @@ +--- +title: 'Google Docs' +description: 'Connect AI agents to Google Docs for document creation and collaboration' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Google Docs to create, edit, and manage Google Docs documents and collaborate on content through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GOOGLE_DOCS], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Google Docs + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.GoogleDocs], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "serverName": "Google Docs", + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.GOOGLE_DOCS]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.GoogleDocs]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/gdocs/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Google Docs MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Google Docs from the list of available integrations. + + + + Complete the OAuth flow to connect your Google Docs account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/google_docs + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/google-docs-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/google-docs-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "google_docs": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Google Docs. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/google_drive.mdx b/docs/mcp-server/google_drive.mdx new file mode 100644 index 00000000..6e28363f --- /dev/null +++ b/docs/mcp-server/google_drive.mdx @@ -0,0 +1,198 @@ +--- +title: 'Google Drive' +description: 'Connect AI agents to Google Drive for file storage and collaboration' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Google Drive to manage files, folders, permissions, and organize cloud storage through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GOOGLE_DRIVE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Google Drive + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.GoogleDrive], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "serverName": "Google Drive", + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.GOOGLE_DRIVE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.GoogleDrive]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/gdrive/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Google Drive MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Google Drive from the list of available integrations. + + + + Complete the OAuth flow to connect your Google Drive account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/google_drive + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/google-drive-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/google-drive-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "google_drive": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Google Drive. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/google_jobs.mdx b/docs/mcp-server/google_jobs.mdx new file mode 100644 index 00000000..b275b401 --- /dev/null +++ b/docs/mcp-server/google_jobs.mdx @@ -0,0 +1,202 @@ +--- +title: 'Google Jobs' +description: 'Connect AI agents to Google Jobs for managing job postings and applications' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Google Jobs to manage job postings and applications through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Google Jobs + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GOOGLE_JOBS], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Google Jobs + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.GoogleJobs], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["GoogleJobs"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.GOOGLE_JOBS]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.GoogleJobs]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/google_jobs/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Google Jobs MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Google Jobs from the list of available integrations. + + + + Complete the OAuth flow to connect your Google Jobs account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/google_jobs + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/google-jobs-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/google-jobs-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_google_jobs_api_key"}' \ + ghcr.io/klavis-ai/google-jobs-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "google_jobs": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Google Jobs. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/google_sheets.mdx b/docs/mcp-server/google_sheets.mdx new file mode 100644 index 00000000..d873c470 --- /dev/null +++ b/docs/mcp-server/google_sheets.mdx @@ -0,0 +1,210 @@ +--- +title: 'Google Sheets' +description: 'Connect AI agents to Google Sheets for spreadsheet automation and data management' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Google Sheets to create, read, and manage spreadsheets and automate data workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GOOGLE_SHEETS], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Google Sheets + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.GoogleSheets], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "serverName": "Google Sheets", + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.GOOGLE_SHEETS]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.GoogleSheets]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/gsheets/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Google Sheets MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Google Sheets from the list of available integrations. + + + + Complete the OAuth flow to connect your Google account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/google_sheets + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/google-sheets-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/google-sheets-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "google_sheets": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Watch the Example + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Google Sheets. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/hacker_news.mdx b/docs/mcp-server/hacker_news.mdx new file mode 100644 index 00000000..9bcf0228 --- /dev/null +++ b/docs/mcp-server/hacker_news.mdx @@ -0,0 +1,174 @@ +--- +title: 'Hacker News' +description: 'Connect AI agents to Hacker News for news aggregation and content analysis' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Hacker News to fetch stories, user profiles, and analyze technology news through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Hacker News + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.HACKER_NEWS], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Hacker News + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.HackerNews], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Hacker News"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + šŸŽ‰ **Your Hacker News MCP Server is ready!** You can now use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Hacker News from the list of available integrations. + + + + No authentication required - your server is immediately ready. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/hacker_news + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/hacker-news-mcp-server:latest + + # Run the server (no API key required for Hacker News) + docker run -p 5000:5000 \ + ghcr.io/klavis-ai/hacker-news-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "hacker_news": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Hacker News. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/heygen.mdx b/docs/mcp-server/heygen.mdx new file mode 100644 index 00000000..45b1ea12 --- /dev/null +++ b/docs/mcp-server/heygen.mdx @@ -0,0 +1,212 @@ +--- +title: 'HeyGen' +description: 'Connect AI agents to HeyGen for AI-powered video generation and avatar creation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to HeyGen to generate AI-powered videos with avatars, manage video content, and create personalized video experiences through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with HeyGen + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.HEYGEN], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with HeyGen + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.HeyGen], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["HeyGen"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.HEYGEN, + auth_data={ + "token": "YOUR_HEYGEN_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.HeyGen, + authData: { + token: "YOUR_HEYGEN_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "HeyGen", + "authData": { + "token": "YOUR_HEYGEN_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your HeyGen MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select HeyGen from the list of available integrations. + + + + Enter your HeyGen API key to authenticate the connection. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/heygen + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/heygen-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/heygen-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "heygen": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for HeyGen. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/hubspot.mdx b/docs/mcp-server/hubspot.mdx new file mode 100644 index 00000000..f35194a3 --- /dev/null +++ b/docs/mcp-server/hubspot.mdx @@ -0,0 +1,198 @@ +--- +title: 'HubSpot' +description: 'Connect AI agents to HubSpot for CRM management and sales automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to HubSpot to manage contacts, companies, deals, tickets, and automate CRM operations through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.HUBSPOT], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with HubSpot + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Hubspot], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["HubSpot"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.HUBSPOT]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.HubSpot]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/hubspot/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your HubSpot MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select HubSpot from the list of available integrations. + + + + Complete the OAuth flow to connect your HubSpot account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/hubspot + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/hubspot-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/hubspot-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "hubspot": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for HubSpot. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/jira.mdx b/docs/mcp-server/jira.mdx new file mode 100644 index 00000000..b88d7092 --- /dev/null +++ b/docs/mcp-server/jira.mdx @@ -0,0 +1,198 @@ +--- +title: 'Jira' +description: 'Connect AI agents to Jira for project tracking and issue management' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Jira to manage issues, projects, sprints, and automate development workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.JIRA], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Jira + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Jira], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Jira"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.JIRA]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Jira]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/jira/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Jira MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Jira from the list of available integrations. + + + + Complete the OAuth flow to connect your Jira account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/jira + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/jira-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/jira-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "jira": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Jira. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/klavis-reportgen.mdx b/docs/mcp-server/klavis-reportgen.mdx new file mode 100644 index 00000000..fdffa984 --- /dev/null +++ b/docs/mcp-server/klavis-reportgen.mdx @@ -0,0 +1,212 @@ +--- +title: 'Klavis ReportGen' +description: 'Connect AI agents to Klavis ReportGen for generating visually appealing web reports' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Klavis ReportGen to generate visually appealing JavaScript web reports based on search queries through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Klavis ReportGen + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.KLAVIS_REPORTGEN], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Klavis ReportGen + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.KlavisReportgen], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Klavis ReportGen"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.KLAVIS_REPORTGEN, + auth_data={ + "api_key": "YOUR_KLAVIS_REPORTGEN_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.KlavisReportgen, + authData: { + api_key: "YOUR_KLAVIS_REPORTGEN_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Klavis ReportGen", + "authData": { + "api_key": "YOUR_KLAVIS_REPORTGEN_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your Klavis ReportGen MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Klavis ReportGen from the list of available integrations. + + + + Enter your Klavis ReportGen API key to authenticate the connection. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/klavis_reportgen + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/klavis-reportgen-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/klavis-reportgen-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "klavis-reportgen": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Klavis ReportGen. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/klaviyo.mdx b/docs/mcp-server/klaviyo.mdx new file mode 100644 index 00000000..3631fcec --- /dev/null +++ b/docs/mcp-server/klaviyo.mdx @@ -0,0 +1,206 @@ +--- +title: 'Klaviyo' +description: 'Connect AI agents to Klaviyo for managing marketing campaigns and customer data' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Klaviyo to manage marketing campaigns and customer data through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Klaviyo + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.KLAVIYO], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Klaviyo + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Klaviyo], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Klaviyo"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.KLAVIYO]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Klaviyo]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/klaviyo/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Klaviyo API key from your [Klaviyo Developer Console](https://developers.klaviyo.com/). + + + + šŸŽ‰ **Your Klaviyo MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Klaviyo from the list of available integrations. + + + + Complete the OAuth flow to connect your Klaviyo account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/klaviyo + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/klaviyo-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/klaviyo-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_klaviyo_api_key"}' \ + ghcr.io/klavis-ai/klaviyo-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "klaviyo": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Klaviyo. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/linear.mdx b/docs/mcp-server/linear.mdx new file mode 100644 index 00000000..5896de05 --- /dev/null +++ b/docs/mcp-server/linear.mdx @@ -0,0 +1,198 @@ +--- +title: 'Linear' +description: 'Connect AI agents to Linear for project management and issue tracking automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Linear to manage issues, projects, teams, and automate project management workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.LINEAR], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Linear + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Linear], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Linear"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.LINEAR]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Linear]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/linear/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Linear MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Linear from the list of available integrations. + + + + Complete the OAuth flow to connect your Linear workspace. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/linear + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/linear-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/linear-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "linear": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Linear. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/linkedin.mdx b/docs/mcp-server/linkedin.mdx new file mode 100644 index 00000000..0cca4489 --- /dev/null +++ b/docs/mcp-server/linkedin.mdx @@ -0,0 +1,202 @@ +--- +title: 'LinkedIn' +description: 'Connect AI agents to LinkedIn for professional networking, content publishing, and profile management automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to LinkedIn to manage profiles, create posts, share content, and automate professional networking activities through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.LINKEDIN], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with LinkedIn + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.LinkedIn], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["LinkedIn"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.LINKEDIN]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.LinkedIn]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/linkedin/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + You can specify custom scope and redirect_url parameters in the OAuth URL. Check the API reference for more details. + + + + Your LinkedIn MCP Server is ready! Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select LinkedIn from the list of available integrations. + + + + Complete the OAuth flow to connect your LinkedIn account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/linkedin + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/linkedin-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/linkedin-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "linkedin": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for LinkedIn. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + diff --git a/docs/mcp-server/mailchimp.mdx b/docs/mcp-server/mailchimp.mdx new file mode 100644 index 00000000..650996dc --- /dev/null +++ b/docs/mcp-server/mailchimp.mdx @@ -0,0 +1,218 @@ +--- +title: 'Mailchimp' +description: 'Connect AI agents to Mailchimp for email marketing and audience management' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Mailchimp to manage email marketing campaigns, audiences, and subscriber data through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Mailchimp + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.MAILCHIMP], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Mailchimp + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Mailchimp], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Mailchimp"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.MAILCHIMP, + auth_data={ + "data": { + "api_key": "YOUR_MAILCHIMP_API_KEY" + } + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Mailchimp, + authData: { + data: { + api_key: "YOUR_MAILCHIMP_API_KEY" + } + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Mailchimp", + "authData": { + "data": { + "api_key": "YOUR_MAILCHIMP_API_KEY" + } + } + }' + ``` + + + + šŸŽ‰ **Your Mailchimp MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Mailchimp from the list of available integrations. + + + + Enter your Mailchimp API key to authenticate the connection. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/mailchimp + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/mailchimp-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/mailchimp-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "mailchimp": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Mailchimp. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/markdown2doc.mdx b/docs/mcp-server/markdown2doc.mdx new file mode 100644 index 00000000..591ca228 --- /dev/null +++ b/docs/mcp-server/markdown2doc.mdx @@ -0,0 +1,175 @@ +--- +title: 'Markdown2doc' +description: 'Connect AI agents to Markdown2doc for converting markdown to various document formats' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Markdown2doc to convert markdown text to different file formats (PDF, DOCX, DOC, HTML, HTML5) through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Markdown2doc + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.MARKDOWN2DOC], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Markdown2doc + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Markdown2doc], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Markdown2doc"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + šŸŽ‰ **Your Markdown2doc MCP Server is ready!** You can now use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Markdown2doc from the list of available integrations. + + + + Enter your Markdown2doc API key to authenticate the connection. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/markdown2doc + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/markdown2doc-mcp-server:latest + + # Run with API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"api_key":"your_markdown2doc_api_key"}' \ + ghcr.io/klavis-ai/markdown2doc-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "markdown2doc": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Markdown2doc. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/mem0.mdx b/docs/mcp-server/mem0.mdx new file mode 100644 index 00000000..5b22b5c4 --- /dev/null +++ b/docs/mcp-server/mem0.mdx @@ -0,0 +1,206 @@ +--- +title: 'Mem0' +description: 'Connect AI agents to Mem0 for long-term memory, retrieval, and personalization' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect AI agents to Mem0 for long-term memory, retrieval, and personalization + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Mem0 + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.MEM0], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Mem0 + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Mem0], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Mem0"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.MEM0]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Mem0]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/mem0/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Mem0 API key from your [Mem0 Developer Console](https://mem0.ai/). + + + + šŸŽ‰ **Your Mem0 MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Mem0 from the list of available integrations. + + + + Complete the OAuth flow to connect your Mem0 account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/mem0 + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/mem0-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/mem0-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_mem0_api_key"}' \ + ghcr.io/klavis-ai/mem0-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "mem0": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Mem0. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/metabase.mdx b/docs/mcp-server/metabase.mdx new file mode 100644 index 00000000..da6ec349 --- /dev/null +++ b/docs/mcp-server/metabase.mdx @@ -0,0 +1,206 @@ +--- +title: 'Metabase' +description: 'Connect AI agents to Metabase for managing data and analytics' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Metabase to manage data and analytics through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Metabase + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.METABASE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Metabase + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Metabase], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Metabase"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.METABASE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Metabase]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/metabase/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Metabase API key from your [Metabase Developer Console](https://www.metabase.com/docs/latest/developers-guide/start). + + + + šŸŽ‰ **Your Metabase MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Metabase from the list of available integrations. + + + + Complete the OAuth flow to connect your Metabase account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/metabase + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/metabase-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/metabase-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_metabase_api_key"}' \ + ghcr.io/klavis-ai/metabase-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "metabase": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Metabase. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/microsoft_teams.mdx b/docs/mcp-server/microsoft_teams.mdx new file mode 100644 index 00000000..ff91724c --- /dev/null +++ b/docs/mcp-server/microsoft_teams.mdx @@ -0,0 +1,200 @@ +--- +title: 'Microsoft Teams' +description: 'Connect AI agents to Microsoft Teams for team collaboration and communication automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Microsoft Teams to manage teams, channels, messages, and automate collaboration workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Microsoft Teams MCP server instance + teams_server = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.MICROSOFT_TEAMS, + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Microsoft Teams MCP server instance + const teamsServer = await klavis.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.MicrosoftTeams, + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/instance/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "serverName": "Microsoft Teams", + "userId": "user123" + }' + ``` + + + + **Response Information**: The API returns: + - `serverUrl`: The URL for connecting your MCP client to Microsoft Teams + - `instanceId`: Unique identifier for your server instance + - `oauthUrl`: OAuth authorization URL for Microsoft Teams authentication + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(teams_server.oauth_url) + ``` + + ```typescript TypeScript + // Redirect user to OAuth authorization + window.location.href = teamsServer.oauthUrl; + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/microsoft_teams/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Microsoft Teams MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Microsoft Teams from the list of available integrations. + + + + Complete the OAuth flow to connect your Microsoft account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/microsoft_teams + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/microsoft-teams-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/microsoft-teams-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "microsoft_teams": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Microsoft Teams. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/mixpanel.mdx b/docs/mcp-server/mixpanel.mdx new file mode 100644 index 00000000..8d3e44e2 --- /dev/null +++ b/docs/mcp-server/mixpanel.mdx @@ -0,0 +1,225 @@ +--- +title: 'Mixpanel' +description: 'Connect AI agents to Mixpanel for advanced analytics and user behavior tracking' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Mixpanel to track events, analyze user behavior, and generate insights from your product analytics through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Mixpanel + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.MIXPANEL], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Mixpanel + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Mixpanel], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Mixpanel"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.MIXPANEL, + auth_data={ + "data": { + "serviceaccount_username": "YOUR_MIXPANEL_SERVICE_ACCOUNT_USERNAME", + "serviceaccount_secret": "YOUR_MIXPANEL_SERVICE_ACCOUNT_SECRET" + } + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Mixpanel, + authData: { + data: { + serviceaccount_username: "YOUR_MIXPANEL_SERVICE_ACCOUNT_USERNAME", + serviceaccount_secret: "YOUR_MIXPANEL_SERVICE_ACCOUNT_SECRET" + } + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Mixpanel", + "authData": { + "data": { + "serviceaccount_username": "YOUR_MIXPANEL_SERVICE_ACCOUNT_USERNAME", + "serviceaccount_secret": "YOUR_MIXPANEL_SERVICE_ACCOUNT_SECRET" + } + } + }' + ``` + + + + Get your Mixpanel service account credentials from your [Mixpanel project settings](https://mixpanel.com/settings/project/). These credentials are required to access your analytics data. + + + + šŸŽ‰ **Your Mixpanel MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Mixpanel from the list of available integrations. + + + + Complete the authentication flow to connect your Mixpanel service account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/mixpanel + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/mixpanel-mcp-server:latest + + # Run with Mixpanel service account credentials + docker run -p 5000:5000 \ + -e AUTH_DATA='{"data":{"serviceaccount_username":"your_username","serviceaccount_secret":"your_secret"}}' \ + ghcr.io/klavis-ai/mixpanel-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "mixpanel": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Mixpanel. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/monday.mdx b/docs/mcp-server/monday.mdx new file mode 100644 index 00000000..70c5982a --- /dev/null +++ b/docs/mcp-server/monday.mdx @@ -0,0 +1,202 @@ +--- +title: 'Monday.com' +description: 'Connect AI agents to Monday.com for project management and workflow automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Monday.com to manage projects, track tasks, and automate workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.MONDAY], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Monday.com + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Monday], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Monday"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.MONDAY]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Monday]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/monday/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Monday.com MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Monday.com from the list of available integrations. + + + + Complete the OAuth flow to connect your Monday.com account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/monday + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/monday-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/monday-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_monday_api_token"}' \ + ghcr.io/klavis-ai/monday-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "monday": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Monday.com. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + diff --git a/docs/mcp-server/moneybird.mdx b/docs/mcp-server/moneybird.mdx new file mode 100644 index 00000000..1aac1e1d --- /dev/null +++ b/docs/mcp-server/moneybird.mdx @@ -0,0 +1,212 @@ +--- +title: 'Moneybird' +description: 'Connect AI agents to Moneybird for accounting and financial management' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Moneybird to manage accounting data, contacts, invoices, and financial records through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Moneybird + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.MONEYBIRD], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Moneybird + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Moneybird], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Moneybird"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.MONEYBIRD, + auth_data={ + "token": "YOUR_MONEYBIRD_API_TOKEN" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Moneybird, + authData: { + token: "YOUR_MONEYBIRD_API_TOKEN" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Moneybird", + "authData": { + "token": "YOUR_MONEYBIRD_API_TOKEN" + } + }' + ``` + + + + šŸŽ‰ **Your Moneybird MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Moneybird from the list of available integrations. + + + + Enter your Moneybird API token to authenticate the connection. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/moneybird + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/moneybird-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/moneybird-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "moneybird": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Moneybird. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/motion.mdx b/docs/mcp-server/motion.mdx new file mode 100644 index 00000000..de37dd88 --- /dev/null +++ b/docs/mcp-server/motion.mdx @@ -0,0 +1,212 @@ +--- +title: 'Motion' +description: 'Connect AI agents to Motion for intelligent task management and calendar automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Motion to manage tasks, projects, workspaces, and automate scheduling workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Motion + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.MOTION], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Motion + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Motion], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Motion"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.MOTION, + auth_data={ + "api_key": "YOUR_MOTION_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Motion, + authData: { + api_key: "YOUR_MOTION_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Motion", + "authData": { + "api_key": "YOUR_MOTION_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your Motion MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Motion from the list of available integrations. + + + + Enter your Motion API key to connect. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/motion + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/motion-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/motion-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "motion": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Motion. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/notion.mdx b/docs/mcp-server/notion.mdx new file mode 100644 index 00000000..cc013f02 --- /dev/null +++ b/docs/mcp-server/notion.mdx @@ -0,0 +1,214 @@ +--- +title: 'Notion' +description: 'Connect AI agents to Notion for knowledge management and collaborative workflows' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Notion to manage pages, databases, and content, enabling AI agents to create, update, and search your workspace. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.NOTION], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Notion + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Notion], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Notion"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.NOTION]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Notion]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/notion/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Notion MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Notion from the list of available integrations. + + + + Complete the OAuth flow to connect your Notion workspace. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/notion + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/notion-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/notion-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_notion_token"}' \ + ghcr.io/klavis-ai/notion-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "notion": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Watch the Example + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Notion. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/onedrive.mdx b/docs/mcp-server/onedrive.mdx new file mode 100644 index 00000000..370b05a4 --- /dev/null +++ b/docs/mcp-server/onedrive.mdx @@ -0,0 +1,198 @@ +--- +title: 'OneDrive' +description: 'Connect AI agents to OneDrive for cloud storage management and file automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to OneDrive to manage files, folders, and automate cloud storage operations through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.ONEDRIVE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a OneDrive MCP server instance + const onedriveServer = await klavis.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Onedrive, + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["OneDrive"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.ONEDRIVE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.OneDrive]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/onedrive/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your OneDrive MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select OneDrive from the list of available integrations. + + + + Complete the OAuth flow to connect your Microsoft account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/onedrive + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/onedrive-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/onedrive-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "onedrive": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for OneDrive. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/openrouter.mdx b/docs/mcp-server/openrouter.mdx new file mode 100644 index 00000000..b3e27ecb --- /dev/null +++ b/docs/mcp-server/openrouter.mdx @@ -0,0 +1,208 @@ +--- +title: 'OpenRouter' +description: 'Connect AI agents to OpenRouter for access to multiple AI models and model comparison' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to OpenRouter to access multiple AI models, compare model performance, and manage usage across different AI providers through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with OpenRouter + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.OPENROUTER], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with OpenRouter + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.OpenRouter], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["OpenRouter"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + You can get your OpenRouter API key from the [OpenRouter website](https://openrouter.ai/keys). + + + + ```python Python + # Set the OpenRouter API key for your instance + response = klavis_client.mcp_server.set_instance_auth( + instance_id=openrouter_server.instance_id, + auth_data={ + "api_key": "YOUR_OPENROUTER_API_KEY" + } + ) + ``` + + ```typescript TypeScript + // Set the OpenRouter API key for your instance + const response = await klavis.mcpServer.setInstanceAuth({ + instanceId: openrouterServer.instanceId, + authData: { + api_key: "YOUR_OPENROUTER_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/instance/set-auth" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "instanceId": "YOUR_INSTANCE_ID", + "authData": { + "api_key": "YOUR_OPENROUTER_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your OpenRouter MCP Server is ready!** You can now use your MCP server URL with any MCP-compatible client to access multiple AI models. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select OpenRouter from the list of available integrations. + + + + Enter your OpenRouter API key to enable multi-model access. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/openrouter + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/openrouter-mcp-server:latest + + # Run with OpenRouter API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"api_key":"your_openrouter_api_key"}' \ + ghcr.io/klavis-ai/openrouter-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "openrouter": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for OpenRouter. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/outlook.mdx b/docs/mcp-server/outlook.mdx new file mode 100644 index 00000000..1f88a4b3 --- /dev/null +++ b/docs/mcp-server/outlook.mdx @@ -0,0 +1,198 @@ +--- +title: 'Outlook' +description: 'Connect AI agents to Outlook for email management and automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Outlook to read, send, organize emails, and automate email workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.OUTLOOK], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Outlook + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Outlook], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Outlook"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.OUTLOOK]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Outlook]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/outlook/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Outlook MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Outlook from the list of available integrations. + + + + Complete the OAuth flow to connect your Microsoft account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/outlook + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/outlook-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/outlook-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "outlook": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Outlook. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + diff --git a/docs/mcp-server/overview.mdx b/docs/mcp-server/overview.mdx new file mode 100644 index 00000000..b48be863 --- /dev/null +++ b/docs/mcp-server/overview.mdx @@ -0,0 +1,290 @@ +--- +title: 'MCP Servers Overview' +description: 'Quickstart to Klavis-hosted MCP servers and available integrations.' +--- + +import KlavisReportgenCard from '/snippets/mcp-server-card/klavis-reportgen-card.mdx'; +import GithubCard from '/snippets/mcp-server-card/github-card.mdx'; +import YoutubeCard from '/snippets/mcp-server-card/youtube-card.mdx'; +import SupabaseCard from '/snippets/mcp-server-card/supabase-card.mdx'; +import SlackCard from '/snippets/mcp-server-card/slack-card.mdx'; +import JiraCard from '/snippets/mcp-server-card/jira-card.mdx'; +import SalesforceCard from '/snippets/mcp-server-card/salesforce-card.mdx'; +import ResendCard from '/snippets/mcp-server-card/resend-card.mdx'; +import PostgresCard from '/snippets/mcp-server-card/postgres-card.mdx'; +import WordpressCard from '/snippets/mcp-server-card/wordpress-card.mdx'; +import NotionCard from '/snippets/mcp-server-card/notion-card.mdx'; +import GmailCard from '/snippets/mcp-server-card/gmail-card.mdx'; +import GoogleCalendarCard from '/snippets/mcp-server-card/google-calendar-card.mdx'; +import GoogleDriveCard from '/snippets/mcp-server-card/google-drive-card.mdx'; +import GoogleDocsCard from '/snippets/mcp-server-card/google-docs-card.mdx'; +import GoogleSheetsCard from '/snippets/mcp-server-card/google-sheets-card.mdx'; +import LinearCard from '/snippets/mcp-server-card/linear-card.mdx'; +import DiscordCard from '/snippets/mcp-server-card/discord-card.mdx'; +import StripeCard from '/snippets/mcp-server-card/stripe-card.mdx'; +import ConfluenceCard from '/snippets/mcp-server-card/confluence-card.mdx'; +import AffinityCard from '/snippets/mcp-server-card/affinity-card.mdx'; +import AirtableCard from '/snippets/mcp-server-card/airtable-card.mdx'; +import AsanaCard from '/snippets/mcp-server-card/asana-card.mdx'; +import CloseCard from '/snippets/mcp-server-card/close-card.mdx'; +import AttioCard from '/snippets/mcp-server-card/attio-card.mdx'; +import CalendlyCard from '/snippets/mcp-server-card/calendly-card.mdx'; +import ClickupCard from '/snippets/mcp-server-card/clickup-card.mdx'; +import CoinbaseCard from '/snippets/mcp-server-card/coinbase-card.mdx'; +import DropboxCard from '/snippets/mcp-server-card/dropbox-card.mdx'; +import QuickbooksCard from '/snippets/mcp-server-card/quickbooks-card.mdx'; +import HubspotCard from '/snippets/mcp-server-card/hubspot-card.mdx'; +import LinkedinCard from '/snippets/mcp-server-card/linkedin-card.mdx'; +import OnedriveCard from '/snippets/mcp-server-card/onedrive-card.mdx'; +import OutlookCard from '/snippets/mcp-server-card/outlook-card.mdx'; +import MicrosoftTeamsCard from '/snippets/mcp-server-card/microsoft-teams-card.mdx'; +import PerplexityCard from '/snippets/mcp-server-card/perplexity-card.mdx'; +import ZendeskCard from '/snippets/mcp-server-card/zendesk-card.mdx'; +import GongCard from '/snippets/mcp-server-card/gong-card.mdx'; +import MotionCard from '/snippets/mcp-server-card/motion-card.mdx'; +import DeepResearchCard from '/snippets/mcp-server-card/deep-research-card.mdx'; +import WebSearchCard from '/snippets/mcp-server-card/web-search-card.mdx'; +import Doc2markdownCard from '/snippets/mcp-server-card/doc2markdown-card.mdx'; +import Markdown2docCard from '/snippets/mcp-server-card/markdown2doc-card.mdx'; +import MixpanelCard from '/snippets/mcp-server-card/mixpanel-card.mdx'; +import BraveSearchCard from '/snippets/mcp-server-card/brave-search-card.mdx'; +import BoxCard from '/snippets/mcp-server-card/box-card.mdx'; +import CloudflareCard from '/snippets/mcp-server-card/cloudflare-card.mdx'; +import DialpadCard from '/snippets/mcp-server-card/dialpad-card.mdx'; +import DocusignCard from '/snippets/mcp-server-card/docusign-card.mdx'; +import ElevenlabsCard from '/snippets/mcp-server-card/elevenlabs-card.mdx'; +import ExaCard from '/snippets/mcp-server-card/exa-card.mdx'; +import FigmaCard from '/snippets/mcp-server-card/figma-card.mdx'; +import FirefliesCard from '/snippets/mcp-server-card/fireflies-card.mdx'; +import FreshdeskCard from '/snippets/mcp-server-card/freshdesk-card.mdx'; +import GitlabCard from '/snippets/mcp-server-card/gitlab-card.mdx'; +import GoogleJobsCard from '/snippets/mcp-server-card/google-jobs-card.mdx'; +import HackerNewsCard from '/snippets/mcp-server-card/hacker-news-card.mdx'; +import HeygenCard from '/snippets/mcp-server-card/heygen-card.mdx'; +import KlaviyoCard from '/snippets/mcp-server-card/klaviyo-card.mdx'; +import MemoCard from '/snippets/mcp-server-card/memo-card.mdx'; +import MetabaseCard from '/snippets/mcp-server-card/metabase-card.mdx'; +import MondayCard from '/snippets/mcp-server-card/monday-card.mdx'; +import MoneybirdCard from '/snippets/mcp-server-card/moneybird-card.mdx'; +import OpenrouterCard from '/snippets/mcp-server-card/openrouter-card.mdx'; +import PagerdutyCard from '/snippets/mcp-server-card/pagerduty-card.mdx'; +import PipedriveCard from '/snippets/mcp-server-card/pipedrive-card.mdx'; +import PlaiCard from '/snippets/mcp-server-card/plai-card.mdx'; +import PosthogCard from '/snippets/mcp-server-card/posthog-card.mdx'; +import SendgridCard from '/snippets/mcp-server-card/sendgrid-card.mdx'; +import ShopifyCard from '/snippets/mcp-server-card/shopify-card.mdx'; +import TavilyCard from '/snippets/mcp-server-card/tavily-card.mdx'; +import UnifiedMcpCard from '/snippets/mcp-server-card/unified-mcp-card.mdx'; +import VercelCard from '/snippets/mcp-server-card/vercel-card.mdx'; +import WhatsappCard from '/snippets/mcp-server-card/whatsapp-card.mdx'; +import CalComCard from '/snippets/mcp-server-card/calcom-card.mdx'; +import FathomCard from '/snippets/mcp-server-card/fathom-card.mdx'; +import ServiceNowCard from '/snippets/mcp-server-card/servicenow-card.mdx'; + +Learn how to easily integrate with Klavis remote hosted MCP Servers. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/mcp-server/pagerduty.mdx b/docs/mcp-server/pagerduty.mdx new file mode 100644 index 00000000..8b9c6cdf --- /dev/null +++ b/docs/mcp-server/pagerduty.mdx @@ -0,0 +1,206 @@ +--- +title: 'Pagerduty' +description: 'Connect AI agents to Pagerduty for managing incidents and alerts' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Pagerduty to manage incidents and alerts through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Pagerduty + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.PAGERDUTY], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Pagerduty + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Pagerduty], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Pagerduty"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.PAGERDUTY]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Pagerduty]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/pagerduty/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Pagerduty API key from your [Pagerduty Developer Console](https://developer.pagerduty.com/). + + + + šŸŽ‰ **Your Pagerduty MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Pagerduty from the list of available integrations. + + + + Complete the OAuth flow to connect your Pagerduty account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/pagerduty + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/pagerduty-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/pagerduty-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_pagerduty_api_key"}' \ + ghcr.io/klavis-ai/pagerduty-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "pagerduty": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Pagerduty. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/perplexity.mdx b/docs/mcp-server/perplexity.mdx new file mode 100644 index 00000000..48ccada3 --- /dev/null +++ b/docs/mcp-server/perplexity.mdx @@ -0,0 +1,212 @@ +--- +title: 'Perplexity' +description: 'Learn how to use Klavis to connect your AI application to Perplexity MCP Server' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Perplexity to leverage AI-powered search capabilities and advanced research functionality through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Perplexity + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.PERPLEXITY], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Perplexity + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Perplexity], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Perplexity"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.PERPLEXITY, + auth_data={ + "api_key": "YOUR_PERPLEXITY_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Perplexity, + authData: { + api_key: "YOUR_PERPLEXITY_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Perplexity", + "authData": { + "api_key": "YOUR_PERPLEXITY_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your Perplexity MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Perplexity from the list of available integrations. + + + + Enter your Perplexity API key to authenticate the connection. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/perplexity + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/perplexity-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/perplexity-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "perplexity": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Perplexity. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/pipedrive.mdx b/docs/mcp-server/pipedrive.mdx new file mode 100644 index 00000000..65b5acee --- /dev/null +++ b/docs/mcp-server/pipedrive.mdx @@ -0,0 +1,206 @@ +--- +title: 'Pipedrive' +description: 'Connect AI agents to Pipedrive for managing sales and CRM' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Pipedrive to manage sales and CRM through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Pipedrive + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.PIPEDRIVE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Pipedrive + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Pipedrive], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Pipedrive"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.PIPEDRIVE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Pipedrive]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/pipedrive/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Pipedrive API key from your [Pipedrive Developer Console](https://developers.pipedrive.com/). + + + + šŸŽ‰ **Your Pipedrive MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Pipedrive from the list of available integrations. + + + + Complete the OAuth flow to connect your Pipedrive account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/pipedrive + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/pipedrive-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/pipedrive-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_pipedrive_api_key"}' \ + ghcr.io/klavis-ai/pipedrive-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "pipedrive": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Pipedrive. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/plai.mdx b/docs/mcp-server/plai.mdx new file mode 100644 index 00000000..99eb5f58 --- /dev/null +++ b/docs/mcp-server/plai.mdx @@ -0,0 +1,212 @@ +--- +title: 'Plai' +description: 'Connect AI agents to Plai for Facebook, Instagram, and LinkedIn advertising' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Plai to create and manage Facebook, Instagram, and LinkedIn ad campaigns through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Plai + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.PLAI], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Plai + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Plai], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Plai"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.PLAI, + auth_data={ + "api_key": "YOUR_PLAI_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Plai, + authData: { + api_key: "YOUR_PLAI_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Plai", + "authData": { + "api_key": "YOUR_PLAI_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your Plai MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Plai from the list of available integrations. + + + + Enter your Plai API key to authenticate the connection. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/plai + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/plai-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/plai-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "plai": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Plai. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/postgres.mdx b/docs/mcp-server/postgres.mdx new file mode 100644 index 00000000..c00a884d --- /dev/null +++ b/docs/mcp-server/postgres.mdx @@ -0,0 +1,216 @@ +--- +title: 'PostgreSQL' +description: 'Connect AI agents to PostgreSQL databases for query execution, data analysis, and database management automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to PostgreSQL databases to execute queries, analyze data, manage tables, and automate database operations through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with PostgreSQL + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.POSTGRES], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + # Create a Strata MCP server with PostgreSQL + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Postgres], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Postgres"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.POSTGRES, + auth_data={ + "api_key": "postgresql://username:password@host:port/database" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Postgres, + authData: { + api_key: "postgresql://username:password@host:port/database" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Postgres", + "authData": { + "api_key": "postgresql://username:password@host:port/database" + } + }' + ``` + + + + Use the standard PostgreSQL connection string format: `postgresql://username:password@host:port/database` + + + + šŸŽ‰ **Your PostgreSQL MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select PostgreSQL from the list of available integrations. + + + + Enter your PostgreSQL connection string in the configuration form. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/postgres + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/postgres-mcp-server:latest + + # Run with connection string + docker run -p 5000:5000 \ + -e POSTGRES_CONNECTION_STRING="postgresql://username:password@host:port/database" \ + ghcr.io/klavis-ai/postgres-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "postgres": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for PostgreSQL. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/posthog.mdx b/docs/mcp-server/posthog.mdx new file mode 100644 index 00000000..0d578857 --- /dev/null +++ b/docs/mcp-server/posthog.mdx @@ -0,0 +1,206 @@ +--- +title: 'Posthog' +description: 'Connect AI agents to Posthog for managing analytics and collaboration' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Posthog to manage analytics and collaboration through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Posthog + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.POSTHOG], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Posthog + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Posthog], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Posthog"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.POSTHOG]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Posthog]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/posthog/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Posthog API key from your [Posthog Developer Console](https://posthog.com/docs). + + + + šŸŽ‰ **Your Posthog MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Posthog from the list of available integrations. + + + + Complete the OAuth flow to connect your Posthog account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/posthog + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/posthog-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/posthog-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_posthog_api_key"}' \ + ghcr.io/klavis-ai/posthog-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "posthog": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Posthog. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/quickbooks.mdx b/docs/mcp-server/quickbooks.mdx new file mode 100644 index 00000000..f40c265a --- /dev/null +++ b/docs/mcp-server/quickbooks.mdx @@ -0,0 +1,198 @@ +--- +title: 'QuickBooks' +description: 'Connect AI agents to QuickBooks for accounting automation and financial data management' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to QuickBooks to manage accounts, invoices, customers, payments, vendors, and automate accounting workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.QUICKBOOKS], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with QuickBooks + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.QuickBooks], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["QuickBooks"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.QUICKBOOKS]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.QuickBooks]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/quickbooks/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your QuickBooks MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select QuickBooks from the list of available integrations. + + + + Complete the OAuth flow to connect your QuickBooks company. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/quickbooks + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/quickbooks-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/quickbooks-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "quickbooks": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for QuickBooks. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/resend.mdx b/docs/mcp-server/resend.mdx new file mode 100644 index 00000000..0fd29334 --- /dev/null +++ b/docs/mcp-server/resend.mdx @@ -0,0 +1,212 @@ +--- +title: 'Resend' +description: 'Connect AI agents to Resend for automated email campaigns and transactional messaging' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Resend to send emails, manage audiences, create broadcasts, and automate email marketing through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Resend + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.RESEND], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Resend + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Resend], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Resend"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.RESEND, + auth_data={ + "api_key": "YOUR_RESEND_API_KEY" + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Resend, + authData: { + api_key: "YOUR_RESEND_API_KEY" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Resend", + "authData": { + "api_key": "YOUR_RESEND_API_KEY" + } + }' + ``` + + + + šŸŽ‰ **Your Resend MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Resend from the list of available integrations. + + + + Enter your Resend API key to connect. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/resend + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/resend-mcp-server:latest + + # Run with API key support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/resend-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "resend": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Resend. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/salesforce.mdx b/docs/mcp-server/salesforce.mdx new file mode 100644 index 00000000..cc211651 --- /dev/null +++ b/docs/mcp-server/salesforce.mdx @@ -0,0 +1,198 @@ +--- +title: 'Salesforce' +description: 'Connect AI agents to Salesforce for CRM and sales automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Salesforce to manage leads, opportunities, accounts, contacts, and automate sales processes through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.SALESFORCE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Salesforce + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Salesforce], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Salesforce"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.SALESFORCE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Salesforce]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/salesforce/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Salesforce MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Salesforce from the list of available integrations. + + + + Complete the OAuth flow to connect your Salesforce account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/salesforce + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/salesforce-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/salesforce-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "salesforce": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Salesforce. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/sendgrid.mdx b/docs/mcp-server/sendgrid.mdx new file mode 100644 index 00000000..1a5fd5a4 --- /dev/null +++ b/docs/mcp-server/sendgrid.mdx @@ -0,0 +1,206 @@ +--- +title: 'Sendgrid' +description: 'Connect AI agents to Sendgrid for managing emails and collaboration' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Sendgrid to manage emails and collaboration through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Sendgrid + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.SENDGRID], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Sendgrid + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Sendgrid], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Sendgrid"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.SENDGRID]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Sendgrid]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/sendgrid/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Sendgrid API key from your [Sendgrid Developer Console](https://sendgrid.com/en-us/solutions/email-api). + + + + šŸŽ‰ **Your Sendgrid MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Sendgrid from the list of available integrations. + + + + Complete the OAuth flow to connect your Sendgrid account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/sendgrid + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/sendgrid-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/sendgrid-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_sendgrid_api_key"}' \ + ghcr.io/klavis-ai/sendgrid-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "sendgrid": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Sendgrid. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/servicenow.mdx b/docs/mcp-server/servicenow.mdx new file mode 100644 index 00000000..6cb43ac2 --- /dev/null +++ b/docs/mcp-server/servicenow.mdx @@ -0,0 +1,191 @@ +--- +title: 'ServiceNow' +description: 'Connect ServiceNow to manage IT service workflows through AI agents.' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to ServiceNow for a central database and application suite to build custom workflows, automate routine work, and manage service requests for various business functions with AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with ServiceNow + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.SERVICENOW], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with ServiceNow + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.ServiceNow], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["ServiceNow"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.SERVICENOW, + auth_data={ + data={ + "instance": "YOUR_SERVICENOW_INSTANCE", + "username": "YOUR_SERVICENOW_USERNAME", + "password": "YOUR_SERVICENOW_PASSWORD" + } + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Cloudflare, + authData: { + data: { + instance: "YOUR_SERVICENOW_INSTANCE", + username: "YOUR_SERVICENOW_USERNAME", + password: "YOUR_SERVICENOW_PASSWORD" + } + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "ServiceNow", + "authData": { + "data": { + "instance": "YOUR_SERVICENOW_INSTANCE", + "username": "YOUR_SERVICENOW_USERNAME", + "password": "YOUR_SERVICENOW_PASSWORD" + } + } + }' + ``` + + + + šŸŽ‰ **Your ServiceNow MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select ServiceNow from the list of available integrations. + + + + Enter your ServiceNow instance name, user name and password to authenticate the connection. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for ServiceNow. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/shopify.mdx b/docs/mcp-server/shopify.mdx new file mode 100644 index 00000000..615b9b88 --- /dev/null +++ b/docs/mcp-server/shopify.mdx @@ -0,0 +1,229 @@ +--- +title: 'Shopify' +description: 'Connect AI agents to Shopify for managing e-commerce and collaboration' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Shopify to manage e-commerce and collaboration through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Shopify + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.SHOPIFY], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Shopify + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Shopify], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Shopify"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + klavis_client.mcp_server.set_strata_auth( + strata_id=response.strata_id, + server_name=McpServerName.SHOPIFY, + auth_data={ + "data": { + "access_token": "YOUR_SHOPIFY_ACCESS_TOKEN", + "shop_domain": "YOUR_SHOP_DOMAIN" + } + } + ) + ``` + + ```typescript TypeScript + await klavis.mcpServer.setStrataAuth({ + strataId: response.strataId, + serverName: Klavis.McpServerName.Shopify, + authData: { + data: { + access_token: "YOUR_SHOPIFY_ACCESS_TOKEN", + shop_domain: "YOUR_SHOP_DOMAIN" + } + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "strataId": "YOUR_STRATA_ID", + "serverName": "Shopify", + "authData": { + "data": { + "access_token": "YOUR_SHOPIFY_ACCESS_TOKEN", + "shop_domain": "YOUR_SHOP_DOMAIN" + } + } + }' + ``` + + + + Get your Shopify access token from your [Shopify Admin API settings](https://shopify.dev/docs/api/admin-rest). The shop domain should be in the format `your-store.myshopify.com`. + + + + šŸŽ‰ **Your Shopify MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Shopify from the list of available integrations. + + + + Complete the OAuth flow to connect your Shopify account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/shopify + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/shopify-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/shopify-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_shopify_api_key"}' \ + ghcr.io/klavis-ai/shopify-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "shopify": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Shopify. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/slack.mdx b/docs/mcp-server/slack.mdx new file mode 100644 index 00000000..465e3540 --- /dev/null +++ b/docs/mcp-server/slack.mdx @@ -0,0 +1,203 @@ +--- +title: 'Slack' +description: 'Connect AI agents to Slack for team communication and collaboration' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Slack to manage team communication, automate workflows, and collaborate through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.SLACK], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Slack + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Slack], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Slack"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.SLACK]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Slack]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/slack/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Slack MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Slack from the list of available integrations. + + + + Complete the OAuth flow to connect your Slack workspace. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/slack + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/slack-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/slack-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"bot_token":"your_slack_bot_token"}' \ + ghcr.io/klavis-ai/slack-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "slack": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Slack. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/stripe.mdx b/docs/mcp-server/stripe.mdx new file mode 100644 index 00000000..af20c688 --- /dev/null +++ b/docs/mcp-server/stripe.mdx @@ -0,0 +1,212 @@ +--- +title: 'Stripe' +description: 'Connect AI agents to Stripe for payment processing, customer management, and subscription automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Stripe to manage payments, customers, subscriptions, and automate payment processing workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.STRIPE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Stripe + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Stripe], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Stripe"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + # Configure your Stripe API key in the Klavis dashboard or via API + # Get your Stripe secret key from https://dashboard.stripe.com/apikeys + stripe_api_key = "sk_test_your_stripe_secret_key_here" + ``` + + ```typescript TypeScript + // Configure your Stripe API key in the Klavis dashboard or via API + // Get your Stripe secret key from https://dashboard.stripe.com/apikeys + const stripeApiKey = "sk_test_your_stripe_secret_key_here"; + ``` + + ```bash cURL + # Configure your Stripe API key in the Klavis dashboard + # Get your Stripe secret key from https://dashboard.stripe.com/apikeys + echo "Configure your API key at: https://www.klavis.ai/home/mcp-servers" + ``` + + + + Get your Stripe secret key from your [Stripe Dashboard](https://dashboard.stripe.com/apikeys). Use test keys for development. + + + + šŸŽ‰ **Your Stripe MCP Server is ready!** Once the API key is configured, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Stripe from the list of available integrations. + + + + Enter your Stripe secret API key in the configuration form. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/stripe + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/stripe-mcp-server:latest + + # Run with API key + docker run -p 5000:5000 \ + -e STRIPE_API_KEY="sk_test_your_stripe_secret_key_here" \ + ghcr.io/klavis-ai/stripe-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "stripe": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Watch the Example + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Stripe. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/supabase.mdx b/docs/mcp-server/supabase.mdx new file mode 100644 index 00000000..49c9b768 --- /dev/null +++ b/docs/mcp-server/supabase.mdx @@ -0,0 +1,262 @@ +--- +title: 'Supabase' +description: 'Connect AI agents to Supabase for database management, project creation, and serverless backend automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Supabase to manage database projects, execute SQL queries, handle migrations, and automate backend development workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.SUPABASE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Supabase + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Supabase], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Supabase"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.SUPABASE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Supabase]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/supabase/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + + + ```python Python + from klavis import Klavis + + klavis_client = Klavis(api_key="YOUR_KLAVIS_API_KEY") + + # Set the Supabase access token + response = klavis_client.mcp_server.set_instance_auth( + instance_id="YOUR_INSTANCE_ID", + auth_data={ + "token": "YOUR_SUPABASE_ACCESS_TOKEN" + } + ) + ``` + + ```typescript TypeScript + import { KlavisClient } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: 'YOUR_KLAVIS_API_KEY' }); + + // Set the Supabase access token + const response = await klavis.mcpServer.setInstanceAuth({ + instanceId: "YOUR_INSTANCE_ID", + authData: { + token: "YOUR_SUPABASE_ACCESS_TOKEN" + } + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/instance/set-auth" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "instanceId": "YOUR_INSTANCE_ID", + "authData": { + "token": "YOUR_SUPABASE_ACCESS_TOKEN" + } + }' + ``` + + + + Get your Supabase access token from [Supabase Dashboard Account Tokens](https://supabase.com/dashboard/account/tokens). + + + + + + Your Supabase MCP Server is ready! Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Supabase from the list of available integrations. + + + + Complete the OAuth flow to connect your Supabase account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/supabase + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/supabase-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/supabase-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "supabase": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +### Video Tutorial + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Supabase. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + diff --git a/docs/mcp-server/tavily.mdx b/docs/mcp-server/tavily.mdx new file mode 100644 index 00000000..2e1b2e93 --- /dev/null +++ b/docs/mcp-server/tavily.mdx @@ -0,0 +1,206 @@ +--- +title: 'Tavily' +description: 'Connect AI agents to Tavily for managing files and collaboration' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Tavily to manage files and collaboration through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Tavily + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.TAVILY], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Tavily + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Tavily], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Tavily"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.TAVILY]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Tavily]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/tavily/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Tavily API key from your [Tavily Developer Console](https://docs.tavily.com/welcome). + + + + šŸŽ‰ **Your Tavily MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Tavily from the list of available integrations. + + + + Complete the OAuth flow to connect your Tavily account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/tavily + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/tavily-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/tavily-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_tavily_api_key"}' \ + ghcr.io/klavis-ai/tavily-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "tavily": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Tavily. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/vercel.mdx b/docs/mcp-server/vercel.mdx new file mode 100644 index 00000000..b9baebea --- /dev/null +++ b/docs/mcp-server/vercel.mdx @@ -0,0 +1,206 @@ +--- +title: 'Vercel' +description: 'Connect AI agents to Vercel for managing deployments and collaboration' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Box to manage files and collaboration through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Vercel + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.VERCEL], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Vercel + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Vercel], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Vercel"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.VERCEL]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Vercel]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/vercel/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Vercel API key from your [Vercel Dashboard](https://vercel.com/dashboard). + + + + šŸŽ‰ **Your Vercel MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Vercel from the list of available integrations. + + + + Complete the OAuth flow to connect your Vercel account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/vercel + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/vercel-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/vercel-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_vercel_api_key"}' \ + ghcr.io/klavis-ai/vercel-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "vercel": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Vercel. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/whatsapp.mdx b/docs/mcp-server/whatsapp.mdx new file mode 100644 index 00000000..9a35bbf0 --- /dev/null +++ b/docs/mcp-server/whatsapp.mdx @@ -0,0 +1,175 @@ +--- +title: 'WhatsApp' +description: 'Connect AI agents to WhatsApp for business messaging and communication' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to WhatsApp to send messages and manage conversations through the WhatsApp Business API via AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with WhatsApp + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.WHATSAPP], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with WhatsApp + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.WhatsApp], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["WhatsApp"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + šŸŽ‰ **Your WhatsApp MCP Server is ready!** You can now use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select WhatsApp from the list of available integrations. + + + + Enter your WhatsApp Business API key to authenticate the connection. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/whatsapp + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/whatsapp-mcp-server:latest + + # Run with WhatsApp API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"token":"your_whatsapp_api_key"}' \ + ghcr.io/klavis-ai/whatsapp-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "whatsapp": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for WhatsApp. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/wordpress.mdx b/docs/mcp-server/wordpress.mdx new file mode 100644 index 00000000..f58ca40d --- /dev/null +++ b/docs/mcp-server/wordpress.mdx @@ -0,0 +1,210 @@ +--- +title: 'WordPress' +description: 'Connect AI agents to WordPress for content management, post creation, and blog automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to WordPress.com to manage posts, pages, sites, and automate content publishing workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.WORDPRESS], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with WordPress + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.WordPress], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["WordPress"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.WORDPRESS]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.WordPress]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/wordpress/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Your WordPress MCP Server is ready! Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select WordPress from the list of available integrations. + + + + Complete the OAuth flow to connect your WordPress.com account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/wordpress + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/wordpress-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/wordpress-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "wordpress": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +### Video Tutorial + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for WordPress. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/mcp-server/youtube.mdx b/docs/mcp-server/youtube.mdx new file mode 100644 index 00000000..9a098045 --- /dev/null +++ b/docs/mcp-server/youtube.mdx @@ -0,0 +1,174 @@ +--- +title: 'YouTube' +description: 'Connect AI agents to YouTube for video transcript extraction, content analysis, and YouTube data automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to YouTube to extract video transcripts, analyze content, and automate video data processing workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with YouTube + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.YOUTUBE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with YouTube + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.YouTube], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["YouTube"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + šŸŽ‰ **Your YouTube MCP Server is ready!** You can now use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select YouTube from the list of available integrations. + + + + No authentication required - your server is immediately ready. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/youtube + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/youtube-mcp-server:latest + + # Run the server + docker run -p 5000:5000 \ + ghcr.io/klavis-ai/youtube-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "youtube": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for YouTube. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + diff --git a/docs/mcp-server/zendesk.mdx b/docs/mcp-server/zendesk.mdx new file mode 100644 index 00000000..6cd7d7df --- /dev/null +++ b/docs/mcp-server/zendesk.mdx @@ -0,0 +1,203 @@ +--- +title: 'Zendesk' +description: 'Connect AI agents to Zendesk for customer support and ticket management' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Zendesk to manage customer support tickets, handle inquiries, and streamline support workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.ZENDESK], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Zendesk + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Zendesk], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Zendesk"], + "userId": "user123" + }' + ``` + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.ZENDESK]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Zendesk]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/zendesk/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + šŸŽ‰ **Your Zendesk MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Zendesk from the list of available integrations. + + + + Complete the OAuth flow to connect your Zendesk account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/zendesk + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/zendesk-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/zendesk-mcp-server:latest + + # Or run with manual OAuth token + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_zendesk_oauth_token"}' \ + ghcr.io/klavis-ai/zendesk-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "zendesk": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Zendesk. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + \ No newline at end of file diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx new file mode 100644 index 00000000..68c9722b --- /dev/null +++ b/docs/quickstart.mdx @@ -0,0 +1,430 @@ +--- +title: Quickstart +description: Let your agent connect any tools reliably in minutes via MCP +icon: rocket +mode: "wide" +--- + + + +**Prerequisites** Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get the API Key. + + + +## Strata +One MCP server that lets AI agents handle any tools progressively. + + + + + + Go to your Dashboard. + + + Klavis enables all integrations for you by default. Click the ellipsis button if you want to disable a specific integration. + + + Strata UI Dashboard + Strata UI Dashboard + + + Complete authentication by clicking the "Authorize" button. You can also skip this step since we have an authentication handler tool that will prompt to authenticate when needed. + + + Add to your favorite MCP-supported clients, such as Cursor, Claude Code, VS Code, ChatGPT, etc. + + + + + + + + ```bash pip + pip install klavis + ``` + + ```bash npm + npm install klavis + ``` + + + + + ```bash Curl + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_KLAVIS_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "userId": "user123", + "servers": ["Gmail", "YouTube"] + }' + ``` + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_KLAVIS_API_KEY") + + response = klavis_client.mcp_server.create_strata_server( + user_id="user123", + servers=[McpServerName.GMAIL, McpServerName.YOUTUBE], + ) + ``` + + ```typescript TypeScript + import { Klavis } from 'klavis'; + + const klavis = new Klavis.Client({ apiKey: 'YOUR_KLAVIS_API_KEY' }); + const strata = await klavis.strata.create({ + userId: 'user123', + servers: ['GMAIL', 'YOUTUBE'], + }); + ``` + + + **Response Information**: The API returns: + - `strataServerUrl`: The URL you'll use to connect your MCP client to the Strata MCP Server + - `oauthUrls`: Authorization links for services that require OAuth authentication + - `apiKeyUrls`: Links to configure API keys for services that use API key authentication + + + Full Strata API endpoints + + + + + ```bash Curl + Copy and paste the OAuth URL into your web browser + ``` + ```python Python + import webbrowser + + # Handle OAuth authorization if needed + if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + input(f"Press Enter after completing {server_name} OAuth authorization...") + ``` + + ```typescript TypeScript + // Handle OAuth authorization if needed + if (response.oauthUrls) { + for (const [serverName, oauthUrl] of Object.entries(response.oauthUrls)) { + if (typeof window !== 'undefined') { + window.open(oauthUrl); + } + console.log(`Please complete ${serverName} OAuth authorization at: ${oauthUrl}`); + // In a real application, you'd wait for OAuth completion via callback + await new Promise(resolve => { + console.log(`Press any key after completing ${serverName} OAuth authorization...`); + // This would be replaced with proper OAuth flow handling + resolve(null); + }); + } + } + ``` + + + + **Authentication Methods**: + - **API Key**: See [API Key authentication guide](/auth/api-key) for details. + - **OAuth**: See [OAuth authentication guide](/auth/oauth) for details. + + + + šŸŽ‰ **Your MCP Server URL is ready to use!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + ```python Python + import os + import asyncio + import webbrowser + + from klavis import Klavis + from klavis.types import McpServerName + from langchain_openai import ChatOpenAI + from langchain_mcp_adapters.client import MultiServerMCPClient + from langgraph.prebuilt import create_react_agent + + from dotenv import load_dotenv + load_dotenv() + + async def main(): + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + # Step 1: Create a Strata MCP server with Gmail and YouTube integrations + response = klavis_client.mcp_server.create_strata_server( + user_id="demo_user", + servers=[McpServerName.GMAIL, McpServerName.YOUTUBE], + ) + + # Step 2: Handle OAuth authorization if needed + if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + input(f"Press Enter after completing {server_name} OAuth authorization...") + + # Step 3: Create LangChain Agent with MCP Tools + mcp_client = MultiServerMCPClient({ + "strata": { + "transport": "streamable_http", + "url": response.strata_server_url, + } + }) + + # Get all available tools from Strata + tools = await mcp_client.get_tools() + # Setup LLM + llm = ChatOpenAI(model="gpt-4o-mini", api_key=os.getenv("OPENAI_API_KEY")) + + # Step 4: Create LangChain agent with MCP tools + agent = create_react_agent( + model=llm, + tools=tools, + prompt=( + "You are a helpful assistant that can use MCP tools. " + ), + ) + + my_email = "golden-kpop@example.com" # TODO: Replace with your email + # Step 5: Invoke the agent + result = await agent.ainvoke({ + "messages": [{"role": "user", "content": f"summarize this video - https://youtu.be/yebNIHKAC4A?si=1Rz_ZsiVRz0YfOR7 and send the summary to my email {my_email}"}], + }) + + # Print only the final AI response content + print(result["messages"][-1].content) + + if __name__ == "__main__": + asyncio.run(main()) + ``` + + + + + ```python Python + import os + import asyncio + import webbrowser + + from klavis import Klavis + from klavis.types import McpServerName + from llama_index.llms.openai import OpenAI + from llama_index.core.agent.workflow import FunctionAgent + from llama_index.tools.mcp import BasicMCPClient + from llama_index.tools.mcp import ( + aget_tools_from_mcp_url, + ) + + from dotenv import load_dotenv + load_dotenv() + + async def main(): + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + # Step 1: Create a Strata MCP server with Gmail and YouTube integrations + response = klavis_client.mcp_server.create_strata_server( + user_id="1234", + servers=[McpServerName.GMAIL, McpServerName.YOUTUBE], + ) + + # Step 2: Handle OAuth authorization if needed + if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + input(f"Press Enter after completing {server_name} OAuth authorization...") + + # Get all available tools from Strata + tools = await aget_tools_from_mcp_url( + response.strata_server_url, + client=BasicMCPClient(response.strata_server_url) + ) + + # Setup LLM + llm = OpenAI(model="gpt-4o-mini", api_key=os.getenv("OPENAI_API_KEY")) + + # Step 3: Create LlamaIndex agent with MCP tools + agent = FunctionAgent( + name="my_first_agent", + description="Agent using MCP-based tools", + tools=tools, + llm=llm, + system_prompt="You are an AI assistant that uses MCP tools.", + ) + + my_email = "golden-kpop@example.com" # TODO: Replace with your email + youtube_video_url = "https://youtu.be/yebNIHKAC4A?si=1Rz_ZsiVRz0YfOR7" # TODO: Replace with your favorite youtube video URL + # Step 4: Invoke the agent + response = await agent.run( + f"summarize this video - {youtube_video_url} and mail this summary to my email {my_email}" + ) + + print(response) + + if __name__ == "__main__": + asyncio.run(main()) + + ``` + + + + Coming soon + + + + ```python Python + import os + import asyncio + import webbrowser + + from dotenv import load_dotenv + from klavis import Klavis + from klavis.types import McpServerName + from autogen_agentchat.agents import AssistantAgent + from autogen_agentchat.ui import Console + from autogen_core import CancellationToken + from autogen_ext.models.openai import OpenAIChatCompletionClient + from autogen_ext.tools.mcp import StreamableHttpServerParams + from autogen_ext.tools.mcp import mcp_server_tools + + + load_dotenv() + + async def main() -> None: + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + # Step 1: Create a Strata MCP server with Gmail and YouTube integrations + response = klavis_client.mcp_server.create_strata_server( + user_id="demo_user", + servers=[McpServerName.GMAIL, McpServerName.YOUTUBE], + ) + + # Handle OAuth authorization if required + if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + input(f"Press Enter after completing {server_name} OAuth authorization...") + + server_params = StreamableHttpServerParams( + url=response.strata_server_url, + timeout=30.0, + sse_read_timeout=300.0, + terminate_on_close=True, + ) + + adapters = await mcp_server_tools(server_params) + + model_client = OpenAIChatCompletionClient(model="gpt-4") + agent = AssistantAgent( + name="MultiAI", + model_client=model_client, + tools=adapters, + system_message="You are a helpful AI assistant.", + ) + + await Console( + agent.run_stream( + task="Get my latest mails.", + cancellation_token=CancellationToken(), + ) + ) + if __name__ == "__main__": + asyncio.run(main()) + ``` + + + + + + + + + Visit https://github.com/Klavis-AI/klavis to view the source code and find more information + + + + + ```bash pipx + pipx install strata-mcp + ``` + ```bash pip + pip install strata-mcp + ``` + + + + Configure your MCP servers using the CLI tool. + + ```bash Add Server + strata add + ``` + ```bash List Servers + strata list + ``` + ```bash Enable Server + strata enable + ``` + + + + Start the Strata server to manage all your tools. + + ```bash Stdio Mode (Default) + strata + ``` + ```bash HTTP/SSE Mode + strata run --port 8080 + ``` + + + + Use the Strata tool to add your AI client. + + ```bash Claude Code + strata tool add claude + ``` + ```bash Cursor + strata tool add cursor + ``` + ```bash VSCode + strata tool add vscode + ``` + + + + + + + + If you're interested in 1:1 mapping between API and tool using our MCP Server Instance, [check here](/legacy/instance). + + +## Next steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + Explore available MCP servers + + + Progressive tool discovery across apps + + + REST endpoints and schemas + + diff --git a/docs/sdk/python.mdx b/docs/sdk/python.mdx new file mode 100644 index 00000000..83fc433b --- /dev/null +++ b/docs/sdk/python.mdx @@ -0,0 +1,229 @@ +--- +title: "Python" +description: "Get started with Klavis AI Python SDK for MCP integrations" +--- +## Installation + +```bash +pip install klavis +``` + +## Get Your API Key + +Sign up at [klavis.ai](https://klavis.ai) and create your API key. + +## Quick Start + +```python +from klavis import Klavis +from klavis.types import McpServerName + +klavis_client = Klavis(api_key="your-klavis-key") + +# Create a YouTube MCP server instance +youtube_server = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.YOUTUBE, + user_id="user123", # Change to user id in your platform +) + +print(f"Server created: {youtube_server.server_url}") +``` + +## Integration with MCP Client + +If you already have an MCP client implementation in your codebase: + +```python +from klavis import Klavis +from klavis.types import McpServerName + +klavis_client = Klavis(api_key="your-klavis-key") + +# Create a YouTube MCP server instance +youtube_server = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.YOUTUBE, + user_id="user123" +) + +print(f"Server created: {youtube_server.server_url}") +``` + +## Function Calling with OpenAI + +Integrate directly with OpenAI using function calling: + +```python +import json +from openai import OpenAI +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat + +OPENAI_MODEL = "gpt-4o-mini" + +openai_client = OpenAI(api_key="YOUR_OPENAI_API_KEY") +klavis_client = Klavis(api_key="YOUR_KLAVIS_API_KEY") + +# Create server instance +youtube_server = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.YOUTUBE, + user_id="user123" +) + +# Get available tools in OpenAI format +tools = klavis_client.mcp_server.list_tools( + server_url=youtube_server.server_url, + format=ToolFormat.OPENAI +) + +# Initial conversation +messages = [{"role": "user", "content": "Summarize this video: https://youtube.com/watch?v=..."}] + +# First OpenAI call with function calling +response = openai_client.chat.completions.create( + model=OPENAI_MODEL, + messages=messages, + tools=tools.tools +) + +messages.append(response.choices[0].message) + +# Handle tool calls +if response.choices[0].message.tool_calls: + for tool_call in response.choices[0].message.tool_calls: + result = klavis_client.mcp_server.call_tools( + server_url=youtube_server.server_url, + tool_name=tool_call.function.name, + tool_args=json.loads(tool_call.function.arguments) + ) + + # Add tool result to conversation + messages.append({ + "role": "tool", + "tool_call_id": tool_call.id, + "content": str(result) + }) + +# Second OpenAI call to process tool results and generate final response +final_response = openai_client.chat.completions.create( + model=OPENAI_MODEL, + messages=messages +) + +print(final_response.choices[0].message.content) +``` + +## Authentication + +### OAuth Services + +For OAuth services like Gmail, Google Drive, etc.: + +```python +# Create server instance for OAuth service +server = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.GMAIL, + user_id="user123" +) + +# OAuth URL is provided in server.oauth_url +import webbrowser +webbrowser.open(server.oauth_url) +``` + +### API Key Services + +For services that require API keys: + +```python +# Set authentication token for API key services +klavis_client.mcp_server.set_auth_token( + instance_id=server.instance_id, + auth_token="your-service-api-key" +) +``` + +## Multi-Tool Workflows + +Combine multiple MCP servers for complex workflows: + +```python +# Create multiple servers +github_server = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.GITHUB, + user_id="user123" +) + +slack_server = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.SLACK, + user_id="user123" +) + +# Use tools from both servers in a single AI conversation +all_tools = [] +all_tools.extend(klavis_client.mcp_server.list_tools(github_server.server_url).tools) +all_tools.extend(klavis_client.mcp_server.list_tools(slack_server.server_url).tools) + +# Initialize conversation +messages = [{"role": "user", "content": "Create a GitHub issue and notify the team on Slack"}] + +# Loop to let LLM work with multiple tools +max_iterations = 5 +for iteration in range(max_iterations): + response = openai_client.chat.completions.create( + model="gpt-4", + messages=messages, + tools=all_tools + ) + + messages.append(response.choices[0].message) + + # Check if LLM wants to use tools + if response.choices[0].message.tool_calls: + for tool_call in response.choices[0].message.tool_calls: + # Determine which server to use based on tool name + server_url = github_server.server_url if "github" in tool_call.function.name else slack_server.server_url + + # Execute tool + result = klavis_client.mcp_server.call_tools( + server_url=server_url, + tool_name=tool_call.function.name, + tool_args=json.loads(tool_call.function.arguments) + ) + + # Add tool result to conversation + messages.append({ + "role": "tool", + "tool_call_id": tool_call.id, + "content": str(result) + }) + else: + # LLM finished the task + print(f"Task completed in {iteration + 1} iterations") + print(response.choices[0].message.content) + break +``` + +## Error Handling + +```python +from klavis.exceptions import KlavisError + +try: + server = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.YOUTUBE, + user_id="user123" + ) +except KlavisError as e: + print(f"Error creating server: {e}") +``` + +## Next Steps + + + + Complete API documentation + + + Explore available MCP servers + + \ No newline at end of file diff --git a/docs/sdk/typescript.mdx b/docs/sdk/typescript.mdx new file mode 100644 index 00000000..32de518c --- /dev/null +++ b/docs/sdk/typescript.mdx @@ -0,0 +1,304 @@ +--- +title: "TypeScript" +description: "Get started with Klavis AI TypeScript SDK for MCP integrations" +--- + +## Installation + +```bash +npm install klavis +``` + +## Get Your API Key + +Sign up at [klavis.ai](https://klavis.ai) and create your API key. + +## Quick Start + +```javascript +import { KlavisClient, Klavis } from 'klavis'; + +const klavisClient = new KlavisClient({ apiKey: 'your-klavis-key' }); + +// Create Gmail MCP server with OAuth +const gmailServer = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Gmail, + userId: "user123" +}); + +// Gmail needs OAuth flow +await window.open(gmailServer.oauthUrl); +``` + +## Integration with MCP Client + +If you already have an MCP client implementation in your codebase: + +```javascript +import { KlavisClient, Klavis } from 'klavis'; + +const klavisClient = new KlavisClient({ apiKey: 'your-klavis-key' }); + +// Create Gmail MCP server with OAuth +const gmailServer = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Gmail, + userId: "user123" +}); + +// Gmail needs OAuth flow +if (gmailServer.oauthUrl) { + await window.open(gmailServer.oauthUrl); +} +``` + +## Function Calling with OpenAI + +Integrate directly with OpenAI using function calling: + +```javascript +import OpenAI from 'openai'; +import { KlavisClient, Klavis } from 'klavis'; + +// Constants +const OPENAI_MODEL = "gpt-4o-mini"; +const EMAIL_RECIPIENT = "john@example.com"; +const EMAIL_SUBJECT = "Hello from Klavis"; +const EMAIL_BODY = "This email was sent using Klavis MCP Server!"; + +const openaiClient = new OpenAI({ apiKey: 'your-openai-key' }); +const klavisClient = new KlavisClient({ apiKey: 'your-klavis-key' }); + +// Create server and get tools +const gmailServer = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Gmail, + userId: "user123" +}); + +// Handle OAuth authentication for Gmail +if (gmailServer.oauthUrl) { + console.log("Please complete OAuth authorization:", gmailServer.oauthUrl); + await window.open(gmailServer.oauthUrl); +} + +const tools = await klavisClient.mcpServer.listTools({ + serverUrl: gmailServer.serverUrl, + format: Klavis.ToolFormat.Openai +}); + +// Initial conversation +const messages = [{ + role: "user", + content: `Please send an email to ${EMAIL_RECIPIENT} with subject "${EMAIL_SUBJECT}" and body "${EMAIL_BODY}"` +}]; + +// First OpenAI call with function calling +const response = await openaiClient.chat.completions.create({ + model: OPENAI_MODEL, + messages: messages, + tools: tools.tools +}); + +messages.push(response.choices[0].message); + +// Handle tool calls +if (response.choices[0].message.tool_calls) { + for (const toolCall of response.choices[0].message.tool_calls) { + const result = await klavisClient.mcpServer.callTools({ + serverUrl: gmailServer.serverUrl, + toolName: toolCall.function.name, + toolArgs: JSON.parse(toolCall.function.arguments) + }); + + // Add tool result to conversation + messages.push({ + role: "tool", + tool_call_id: toolCall.id, + content: JSON.stringify(result) + }); + } +} + +// Second OpenAI call to process tool results and generate final response +const finalResponse = await openaiClient.chat.completions.create({ + model: OPENAI_MODEL, + messages: messages +}); + +console.log(finalResponse.choices[0].message.content); +``` + +## Authentication + +### OAuth Services + +For OAuth services like Gmail, Google Drive, etc.: + +```javascript +// Create server instance for OAuth service +const server = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Gmail, + userId: "user123" +}); + +// Handle OAuth flow +if (server.oauthUrl) { + console.log("Please complete OAuth authorization:", server.oauthUrl); + window.open(server.oauthUrl); +} +``` + +### API Key Services + +For services that require API keys: + +```javascript +// Set authentication token for API key services +await klavisClient.mcpServer.setAuthToken({ + instanceId: server.instanceId, + authData: { + token: "your-service-api-key" + } +}); +``` + +## Multi-Tool Workflows + +Combine multiple MCP servers for complex workflows: + +```javascript +// Create multiple servers +const githubServer = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Github, + userId: "user123" +}); + +const slackServer = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Slack, + userId: "user123" +}); + +// Get tools from both servers +const githubTools = await klavisClient.mcpServer.listTools({ + serverUrl: githubServer.serverUrl, + format: Klavis.ToolFormat.Openai +}); + +const slackTools = await klavisClient.mcpServer.listTools({ + serverUrl: slackServer.serverUrl, + format: Klavis.ToolFormat.Openai +}); + +// Combine all tools +const allTools = [...githubTools.tools, ...slackTools.tools]; + +// Initialize conversation +const messages = [{ + role: "user", + content: "Create a GitHub issue and notify the team on Slack" +}]; + +// Loop to let LLM work with multiple tools +const maxIterations = 5; +for (let iteration = 0; iteration < maxIterations; iteration++) { + const response = await openaiClient.chat.completions.create({ + model: "gpt-4", + messages: messages, + tools: allTools + }); + + messages.push(response.choices[0].message); + + // Check if LLM wants to use tools + if (response.choices[0].message.tool_calls) { + for (const toolCall of response.choices[0].message.tool_calls) { + // Determine which server to use based on tool name + const serverUrl = toolCall.function.name.includes('github') + ? githubServer.serverUrl + : slackServer.serverUrl; + + // Execute tool + const result = await klavisClient.mcpServer.callTools({ + serverUrl: serverUrl, + toolName: toolCall.function.name, + toolArgs: JSON.parse(toolCall.function.arguments) + }); + + // Add tool result to conversation + messages.push({ + role: "tool", + tool_call_id: toolCall.id, + content: JSON.stringify(result) + }); + } + } else { + // LLM finished the task + console.log(`Task completed in ${iteration + 1} iterations`); + console.log(response.choices[0].message.content); + break; + } +} +``` + +## Error Handling + +```javascript +import { KlavisError } from 'klavis'; + +try { + const server = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Youtube, + userId: "user123" + }); +} catch (error) { + if (error instanceof KlavisError) { + console.error(`Klavis Error: ${error.message}`); + } else { + console.error(`Unexpected error: ${error}`); + } +} +``` + +## TypeScript Types + +The SDK is fully typed for better development experience: + +```typescript +import { KlavisClient, Klavis } from 'klavis'; +import type { + ServerInstance, + ToolsResponse, + CallToolResponse +} from 'klavis/types'; + +const klavisClient = new KlavisClient({ apiKey: 'your-klavis-key' }); + +// Type-safe server creation +const server: ServerInstance = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Github, + userId: "user123" +}); + +// Type-safe tool listing +const tools: ToolsResponse = await klavisClient.mcpServer.listTools({ + serverUrl: server.serverUrl, + format: Klavis.ToolFormat.Openai +}); + +// Type-safe tool calling +const result: CallToolResponse = await klavisClient.mcpServer.callTools({ + serverUrl: server.serverUrl, + toolName: "get_repository", + toolArgs: { owner: "octocat", repo: "Hello-World" } +}); +``` + +## Next Steps + + + + Complete API documentation + + + Explore available MCP servers + + \ No newline at end of file diff --git a/docs/snippets/ai-platform-card/claude-card.mdx b/docs/snippets/ai-platform-card/claude-card.mdx new file mode 100644 index 00000000..529eb593 --- /dev/null +++ b/docs/snippets/ai-platform-card/claude-card.mdx @@ -0,0 +1,24 @@ + + + + } + href="https://github.com/ai-platform-integration/claude" +> \ No newline at end of file diff --git a/docs/snippets/ai-platform-card/crewai-card.mdx b/docs/snippets/ai-platform-card/crewai-card.mdx new file mode 100644 index 00000000..101e42dc --- /dev/null +++ b/docs/snippets/ai-platform-card/crewai-card.mdx @@ -0,0 +1,27 @@ + + {"CrewAI"} + + + + } + href="https://github.com/ai-platform-integration/crewai" +> \ No newline at end of file diff --git a/docs/snippets/ai-platform-card/fireworks-ai-card.mdx b/docs/snippets/ai-platform-card/fireworks-ai-card.mdx new file mode 100644 index 00000000..d1033829 --- /dev/null +++ b/docs/snippets/ai-platform-card/fireworks-ai-card.mdx @@ -0,0 +1,25 @@ + + {"Fireworks"} + + + } + href="https://github.com/ai-platform-integration/fireworks-ai" +> \ No newline at end of file diff --git a/docs/snippets/ai-platform-card/gemini-card.mdx b/docs/snippets/ai-platform-card/gemini-card.mdx new file mode 100644 index 00000000..1cdadb65 --- /dev/null +++ b/docs/snippets/ai-platform-card/gemini-card.mdx @@ -0,0 +1,23 @@ + + {"Gemini"} + + + } + href="https://github.com/ai-platform-integration/gemini" +> \ No newline at end of file diff --git a/docs/snippets/ai-platform-card/google-adk-card.mdx b/docs/snippets/ai-platform-card/google-adk-card.mdx new file mode 100644 index 00000000..8eedfa8f --- /dev/null +++ b/docs/snippets/ai-platform-card/google-adk-card.mdx @@ -0,0 +1,5 @@ + diff --git a/docs/snippets/ai-platform-card/langchain-card.mdx b/docs/snippets/ai-platform-card/langchain-card.mdx new file mode 100644 index 00000000..d4406791 --- /dev/null +++ b/docs/snippets/ai-platform-card/langchain-card.mdx @@ -0,0 +1,29 @@ + + {"LangChain"} + + + + } + href="https://github.com/ai-platform-integration/langchain" +> \ No newline at end of file diff --git a/docs/snippets/ai-platform-card/llamaindex-card.mdx b/docs/snippets/ai-platform-card/llamaindex-card.mdx new file mode 100644 index 00000000..a20af1a4 --- /dev/null +++ b/docs/snippets/ai-platform-card/llamaindex-card.mdx @@ -0,0 +1,38 @@ + + {"LlamaIndex"} + + + + + + + + + + + } + href="https://github.com/ai-platform-integration/llamaindex" +> \ No newline at end of file diff --git a/docs/snippets/ai-platform-card/mastra-card.mdx b/docs/snippets/ai-platform-card/mastra-card.mdx new file mode 100644 index 00000000..894ec5cc --- /dev/null +++ b/docs/snippets/ai-platform-card/mastra-card.mdx @@ -0,0 +1,34 @@ + + + + + + + + + } + href="https://github.com/ai-platform-integration/mastra" +> \ No newline at end of file diff --git a/docs/snippets/ai-platform-card/openai-card.mdx b/docs/snippets/ai-platform-card/openai-card.mdx new file mode 100644 index 00000000..8cb1725f --- /dev/null +++ b/docs/snippets/ai-platform-card/openai-card.mdx @@ -0,0 +1,16 @@ + + {"OpenAI icon"} + + + } + href="https://github.com/ai-platform-integration/openai" +> \ No newline at end of file diff --git a/docs/snippets/ai-platform-card/together-ai-card.mdx b/docs/snippets/ai-platform-card/together-ai-card.mdx new file mode 100644 index 00000000..e508ca31 --- /dev/null +++ b/docs/snippets/ai-platform-card/together-ai-card.mdx @@ -0,0 +1,26 @@ + + {"together.ai"} + + + + } + href="https://github.com/ai-platform-integration/together-ai" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/affinity-card.mdx b/docs/snippets/mcp-server-card/affinity-card.mdx new file mode 100644 index 00000000..d9d7f611 --- /dev/null +++ b/docs/snippets/mcp-server-card/affinity-card.mdx @@ -0,0 +1,22 @@ + + {"Affinity icon"} + + + } + href="https://github.com/mcp-server/affinity" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/airtable-card.mdx b/docs/snippets/mcp-server-card/airtable-card.mdx new file mode 100644 index 00000000..dbb9d158 --- /dev/null +++ b/docs/snippets/mcp-server-card/airtable-card.mdx @@ -0,0 +1,30 @@ + + + + + + + } + href="https://github.com/mcp-server/airtable" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/asana-card.mdx b/docs/snippets/mcp-server-card/asana-card.mdx new file mode 100644 index 00000000..e373608e --- /dev/null +++ b/docs/snippets/mcp-server-card/asana-card.mdx @@ -0,0 +1,18 @@ + + + + } + href="https://github.com/mcp-server/asana" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/attio-card.mdx b/docs/snippets/mcp-server-card/attio-card.mdx new file mode 100644 index 00000000..30ee912c --- /dev/null +++ b/docs/snippets/mcp-server-card/attio-card.mdx @@ -0,0 +1,19 @@ + + + + } + href="https://github.com/mcp-server/attio" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/box-card.mdx b/docs/snippets/mcp-server-card/box-card.mdx new file mode 100644 index 00000000..200e56f1 --- /dev/null +++ b/docs/snippets/mcp-server-card/box-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/brave-search-card.mdx b/docs/snippets/mcp-server-card/brave-search-card.mdx new file mode 100644 index 00000000..94595486 --- /dev/null +++ b/docs/snippets/mcp-server-card/brave-search-card.mdx @@ -0,0 +1,59 @@ + + + + + + + + + + + + + + + + + + } + href="https://github.com/mcp-server/brave_search" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/calcom-card.mdx b/docs/snippets/mcp-server-card/calcom-card.mdx new file mode 100644 index 00000000..4e18d915 --- /dev/null +++ b/docs/snippets/mcp-server-card/calcom-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/calendly-card.mdx b/docs/snippets/mcp-server-card/calendly-card.mdx new file mode 100644 index 00000000..79428641 --- /dev/null +++ b/docs/snippets/mcp-server-card/calendly-card.mdx @@ -0,0 +1,36 @@ + + {"Calendly icon"} + + + + + + + + + + + } + href="https://github.com/mcp-server/calendly" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/clickup-card.mdx b/docs/snippets/mcp-server-card/clickup-card.mdx new file mode 100644 index 00000000..d52c36ca --- /dev/null +++ b/docs/snippets/mcp-server-card/clickup-card.mdx @@ -0,0 +1,20 @@ + + + + + + + + + + } + href="https://github.com/mcp-server/clickup" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/close-card.mdx b/docs/snippets/mcp-server-card/close-card.mdx new file mode 100644 index 00000000..d47b7339 --- /dev/null +++ b/docs/snippets/mcp-server-card/close-card.mdx @@ -0,0 +1,46 @@ + + + + + + + + + + + + + + + + } + href="https://github.com/mcp-server/close" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/cloudflare-card.mdx b/docs/snippets/mcp-server-card/cloudflare-card.mdx new file mode 100644 index 00000000..eaa9e154 --- /dev/null +++ b/docs/snippets/mcp-server-card/cloudflare-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/coinbase-card.mdx b/docs/snippets/mcp-server-card/coinbase-card.mdx new file mode 100644 index 00000000..7e08d289 --- /dev/null +++ b/docs/snippets/mcp-server-card/coinbase-card.mdx @@ -0,0 +1,28 @@ + + {"Coinbase icon"} + + + + } + href="https://github.com/mcp-server/coinbase" +> diff --git a/docs/snippets/mcp-server-card/confluence-card.mdx b/docs/snippets/mcp-server-card/confluence-card.mdx new file mode 100644 index 00000000..e31d3134 --- /dev/null +++ b/docs/snippets/mcp-server-card/confluence-card.mdx @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + } + href="https://github.com/mcp-server/confluence" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/deep-research-card.mdx b/docs/snippets/mcp-server-card/deep-research-card.mdx new file mode 100644 index 00000000..5f843519 --- /dev/null +++ b/docs/snippets/mcp-server-card/deep-research-card.mdx @@ -0,0 +1,12 @@ + + + + {"\uD83D\uDD25"} + + + } + href="https://github.com/mcp-server/firecrawl-deep-research" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/dialpad-card.mdx b/docs/snippets/mcp-server-card/dialpad-card.mdx new file mode 100644 index 00000000..5c9879c2 --- /dev/null +++ b/docs/snippets/mcp-server-card/dialpad-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/discord-card.mdx b/docs/snippets/mcp-server-card/discord-card.mdx new file mode 100644 index 00000000..e8292efa --- /dev/null +++ b/docs/snippets/mcp-server-card/discord-card.mdx @@ -0,0 +1,19 @@ + + + + } + href="https://github.com/mcp-server/discord" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/doc2markdown-card.mdx b/docs/snippets/mcp-server-card/doc2markdown-card.mdx new file mode 100644 index 00000000..05dfebdf --- /dev/null +++ b/docs/snippets/mcp-server-card/doc2markdown-card.mdx @@ -0,0 +1,17 @@ + + + + } + href="https://github.com/mcp-server/doc2markdown" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/docusign-card.mdx b/docs/snippets/mcp-server-card/docusign-card.mdx new file mode 100644 index 00000000..1b63b2d5 --- /dev/null +++ b/docs/snippets/mcp-server-card/docusign-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/dropbox-card.mdx b/docs/snippets/mcp-server-card/dropbox-card.mdx new file mode 100644 index 00000000..1c5f5809 --- /dev/null +++ b/docs/snippets/mcp-server-card/dropbox-card.mdx @@ -0,0 +1,43 @@ + + + + + + + + + + + } + href="https://github.com/mcp-server/dropbox" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/elevenlabs-card.mdx b/docs/snippets/mcp-server-card/elevenlabs-card.mdx new file mode 100644 index 00000000..7b1b6645 --- /dev/null +++ b/docs/snippets/mcp-server-card/elevenlabs-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/exa-card.mdx b/docs/snippets/mcp-server-card/exa-card.mdx new file mode 100644 index 00000000..48ed5ee8 --- /dev/null +++ b/docs/snippets/mcp-server-card/exa-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/fathom-card.mdx b/docs/snippets/mcp-server-card/fathom-card.mdx new file mode 100644 index 00000000..e6c590bb --- /dev/null +++ b/docs/snippets/mcp-server-card/fathom-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/figma-card.mdx b/docs/snippets/mcp-server-card/figma-card.mdx new file mode 100644 index 00000000..5f3d288b --- /dev/null +++ b/docs/snippets/mcp-server-card/figma-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/fireflies-card.mdx b/docs/snippets/mcp-server-card/fireflies-card.mdx new file mode 100644 index 00000000..d73ec8fd --- /dev/null +++ b/docs/snippets/mcp-server-card/fireflies-card.mdx @@ -0,0 +1,5 @@ + diff --git a/docs/snippets/mcp-server-card/freshdesk-card.mdx b/docs/snippets/mcp-server-card/freshdesk-card.mdx new file mode 100644 index 00000000..35f8718f --- /dev/null +++ b/docs/snippets/mcp-server-card/freshdesk-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/github-card.mdx b/docs/snippets/mcp-server-card/github-card.mdx new file mode 100644 index 00000000..5f50cfed --- /dev/null +++ b/docs/snippets/mcp-server-card/github-card.mdx @@ -0,0 +1,20 @@ + + + + } + href="https://github.com/mcp-server/github" +/> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/gitlab-card.mdx b/docs/snippets/mcp-server-card/gitlab-card.mdx new file mode 100644 index 00000000..9ef519f9 --- /dev/null +++ b/docs/snippets/mcp-server-card/gitlab-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/gmail-card.mdx b/docs/snippets/mcp-server-card/gmail-card.mdx new file mode 100644 index 00000000..504a4818 --- /dev/null +++ b/docs/snippets/mcp-server-card/gmail-card.mdx @@ -0,0 +1,48 @@ + + + + + + + + } + href="https://github.com/mcp-server/gmail" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/gong-card.mdx b/docs/snippets/mcp-server-card/gong-card.mdx new file mode 100644 index 00000000..1af3adea --- /dev/null +++ b/docs/snippets/mcp-server-card/gong-card.mdx @@ -0,0 +1,24 @@ + + + + } + href="https://github.com/mcp-server/gong" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/google-calendar-card.mdx b/docs/snippets/mcp-server-card/google-calendar-card.mdx new file mode 100644 index 00000000..dadd6202 --- /dev/null +++ b/docs/snippets/mcp-server-card/google-calendar-card.mdx @@ -0,0 +1,45 @@ + + + + + + + + + + + } + href="https://github.com/mcp-server/google_calendar" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/google-docs-card.mdx b/docs/snippets/mcp-server-card/google-docs-card.mdx new file mode 100644 index 00000000..f1c92bf8 --- /dev/null +++ b/docs/snippets/mcp-server-card/google-docs-card.mdx @@ -0,0 +1,37 @@ + + {"Google Docs icon"} + + + + + } + href="https://github.com/mcp-server/google_docs" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/google-drive-card.mdx b/docs/snippets/mcp-server-card/google-drive-card.mdx new file mode 100644 index 00000000..5cac5ee5 --- /dev/null +++ b/docs/snippets/mcp-server-card/google-drive-card.mdx @@ -0,0 +1,38 @@ + + + + + + + + + } + href="https://github.com/mcp-server/google_drive" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/google-jobs-card.mdx b/docs/snippets/mcp-server-card/google-jobs-card.mdx new file mode 100644 index 00000000..83d0041c --- /dev/null +++ b/docs/snippets/mcp-server-card/google-jobs-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/google-sheets-card.mdx b/docs/snippets/mcp-server-card/google-sheets-card.mdx new file mode 100644 index 00000000..e3c52680 --- /dev/null +++ b/docs/snippets/mcp-server-card/google-sheets-card.mdx @@ -0,0 +1,12 @@ + + + + + + + } + href="https://github.com/mcp-server/google_sheets" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/hacker-news-card.mdx b/docs/snippets/mcp-server-card/hacker-news-card.mdx new file mode 100644 index 00000000..d236ff63 --- /dev/null +++ b/docs/snippets/mcp-server-card/hacker-news-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/heygen-card.mdx b/docs/snippets/mcp-server-card/heygen-card.mdx new file mode 100644 index 00000000..79dd3b39 --- /dev/null +++ b/docs/snippets/mcp-server-card/heygen-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/hubspot-card.mdx b/docs/snippets/mcp-server-card/hubspot-card.mdx new file mode 100644 index 00000000..979ca528 --- /dev/null +++ b/docs/snippets/mcp-server-card/hubspot-card.mdx @@ -0,0 +1,24 @@ + + + + } + href="https://github.com/mcp-server/hubspot" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/jira-card.mdx b/docs/snippets/mcp-server-card/jira-card.mdx new file mode 100644 index 00000000..13878ad4 --- /dev/null +++ b/docs/snippets/mcp-server-card/jira-card.mdx @@ -0,0 +1,7 @@ + + } + href="https://github.com/mcp-server/jira" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/klavis-reportgen-card.mdx b/docs/snippets/mcp-server-card/klavis-reportgen-card.mdx new file mode 100644 index 00000000..5d85f446 --- /dev/null +++ b/docs/snippets/mcp-server-card/klavis-reportgen-card.mdx @@ -0,0 +1,7 @@ + + } + href="https://github.com/mcp-server/klavis-reportgen" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/klavis-reportgen.mdx b/docs/snippets/mcp-server-card/klavis-reportgen.mdx new file mode 100644 index 00000000..f8454946 --- /dev/null +++ b/docs/snippets/mcp-server-card/klavis-reportgen.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/klaviyo-card.mdx b/docs/snippets/mcp-server-card/klaviyo-card.mdx new file mode 100644 index 00000000..fcab5d9f --- /dev/null +++ b/docs/snippets/mcp-server-card/klaviyo-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/linear-card.mdx b/docs/snippets/mcp-server-card/linear-card.mdx new file mode 100644 index 00000000..6de03e1f --- /dev/null +++ b/docs/snippets/mcp-server-card/linear-card.mdx @@ -0,0 +1,16 @@ + + + + } + href="https://github.com/mcp-server/linear" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/linkedin-card.mdx b/docs/snippets/mcp-server-card/linkedin-card.mdx new file mode 100644 index 00000000..e381b01a --- /dev/null +++ b/docs/snippets/mcp-server-card/linkedin-card.mdx @@ -0,0 +1,24 @@ + + + + + + + } + href="https://github.com/mcp-server/linkedin" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/mailchimp-card.mdx b/docs/snippets/mcp-server-card/mailchimp-card.mdx new file mode 100644 index 00000000..dd6166dd --- /dev/null +++ b/docs/snippets/mcp-server-card/mailchimp-card.mdx @@ -0,0 +1,28 @@ + + {"Mailchimp Freddie icon"} + + + + + + } + href="https://github.com/mcp-server/mailchimp" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/markdown2doc-card.mdx b/docs/snippets/mcp-server-card/markdown2doc-card.mdx new file mode 100644 index 00000000..b56fbee0 --- /dev/null +++ b/docs/snippets/mcp-server-card/markdown2doc-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/memo-card.mdx b/docs/snippets/mcp-server-card/memo-card.mdx new file mode 100644 index 00000000..030c2d71 --- /dev/null +++ b/docs/snippets/mcp-server-card/memo-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/metabase-card.mdx b/docs/snippets/mcp-server-card/metabase-card.mdx new file mode 100644 index 00000000..e6b93c18 --- /dev/null +++ b/docs/snippets/mcp-server-card/metabase-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/microsoft-teams-card.mdx b/docs/snippets/mcp-server-card/microsoft-teams-card.mdx new file mode 100644 index 00000000..ef340e09 --- /dev/null +++ b/docs/snippets/mcp-server-card/microsoft-teams-card.mdx @@ -0,0 +1,14 @@ + + + + + + + + + } + href="https://github.com/mcp-server/microsoft_teams" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/mixpanel-card.mdx b/docs/snippets/mcp-server-card/mixpanel-card.mdx new file mode 100644 index 00000000..30e347bf --- /dev/null +++ b/docs/snippets/mcp-server-card/mixpanel-card.mdx @@ -0,0 +1,15 @@ + + + + } + href="https://github.com/mcp-server/mixpanel" +> diff --git a/docs/snippets/mcp-server-card/monday-card.mdx b/docs/snippets/mcp-server-card/monday-card.mdx new file mode 100644 index 00000000..18519a75 --- /dev/null +++ b/docs/snippets/mcp-server-card/monday-card.mdx @@ -0,0 +1,33 @@ + + + + + + + + } + href="https://github.com/mcp-server/monday" +> diff --git a/docs/snippets/mcp-server-card/moneybird-card.mdx b/docs/snippets/mcp-server-card/moneybird-card.mdx new file mode 100644 index 00000000..9a784ed8 --- /dev/null +++ b/docs/snippets/mcp-server-card/moneybird-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/motion-card.mdx b/docs/snippets/mcp-server-card/motion-card.mdx new file mode 100644 index 00000000..0f83867c --- /dev/null +++ b/docs/snippets/mcp-server-card/motion-card.mdx @@ -0,0 +1,16 @@ + + + + + } + href="https://github.com/mcp-server/motion" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/no-code.mdx b/docs/snippets/mcp-server-card/no-code.mdx new file mode 100644 index 00000000..33b60169 --- /dev/null +++ b/docs/snippets/mcp-server-card/no-code.mdx @@ -0,0 +1,11 @@ +## No-Code + + **Connect to enterprise-grade MCP servers instantly!** + + ![Klavis MCP Servers - No Code Creation](/images/mcp-server.png) + + + [Get Started →](https://www.klavis.ai/home/mcp-servers) + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/notion-card.mdx b/docs/snippets/mcp-server-card/notion-card.mdx new file mode 100644 index 00000000..5698d5cb --- /dev/null +++ b/docs/snippets/mcp-server-card/notion-card.mdx @@ -0,0 +1,31 @@ + + + + + } + href="https://github.com/mcp-server/notion" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/onedrive-card.mdx b/docs/snippets/mcp-server-card/onedrive-card.mdx new file mode 100644 index 00000000..e9fc634b --- /dev/null +++ b/docs/snippets/mcp-server-card/onedrive-card.mdx @@ -0,0 +1,15 @@ + + OfficeCore10_32x_24x_20x_16x_01-22-2019 + + + + + + + + } + href="https://github.com/mcp-server/onedrive" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/openrouter-card.mdx b/docs/snippets/mcp-server-card/openrouter-card.mdx new file mode 100644 index 00000000..c371396c --- /dev/null +++ b/docs/snippets/mcp-server-card/openrouter-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/outlook-card.mdx b/docs/snippets/mcp-server-card/outlook-card.mdx new file mode 100644 index 00000000..5f8aed90 --- /dev/null +++ b/docs/snippets/mcp-server-card/outlook-card.mdx @@ -0,0 +1,12 @@ + + + + + + + } + href="https://github.com/mcp-server/outlook" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/pagerduty-card.mdx b/docs/snippets/mcp-server-card/pagerduty-card.mdx new file mode 100644 index 00000000..d4bf2da4 --- /dev/null +++ b/docs/snippets/mcp-server-card/pagerduty-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/perplexity-card.mdx b/docs/snippets/mcp-server-card/perplexity-card.mdx new file mode 100644 index 00000000..043465cf --- /dev/null +++ b/docs/snippets/mcp-server-card/perplexity-card.mdx @@ -0,0 +1,24 @@ + + + + } + href="https://github.com/mcp-server/perplexity" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/pipedrive-card.mdx b/docs/snippets/mcp-server-card/pipedrive-card.mdx new file mode 100644 index 00000000..d32f07b7 --- /dev/null +++ b/docs/snippets/mcp-server-card/pipedrive-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/plai-card.mdx b/docs/snippets/mcp-server-card/plai-card.mdx new file mode 100644 index 00000000..bfb6df52 --- /dev/null +++ b/docs/snippets/mcp-server-card/plai-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/postgres-card.mdx b/docs/snippets/mcp-server-card/postgres-card.mdx new file mode 100644 index 00000000..971ba680 --- /dev/null +++ b/docs/snippets/mcp-server-card/postgres-card.mdx @@ -0,0 +1,74 @@ + + + + + + + + + + + + + + } + href="https://github.com/mcp-server/postgres" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/posthog-card.mdx b/docs/snippets/mcp-server-card/posthog-card.mdx new file mode 100644 index 00000000..c26b22d3 --- /dev/null +++ b/docs/snippets/mcp-server-card/posthog-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/quickbooks-card.mdx b/docs/snippets/mcp-server-card/quickbooks-card.mdx new file mode 100644 index 00000000..abc20acb --- /dev/null +++ b/docs/snippets/mcp-server-card/quickbooks-card.mdx @@ -0,0 +1,7 @@ + + } + href="https://github.com/mcp-server/quickbooks" +> diff --git a/docs/snippets/mcp-server-card/resend-card.mdx b/docs/snippets/mcp-server-card/resend-card.mdx new file mode 100644 index 00000000..aef3a769 --- /dev/null +++ b/docs/snippets/mcp-server-card/resend-card.mdx @@ -0,0 +1,9 @@ + + + + } + href="https://github.com/mcp-server/resend" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/salesforce-card.mdx b/docs/snippets/mcp-server-card/salesforce-card.mdx new file mode 100644 index 00000000..3336e5b9 --- /dev/null +++ b/docs/snippets/mcp-server-card/salesforce-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/sendgrid-card.mdx b/docs/snippets/mcp-server-card/sendgrid-card.mdx new file mode 100644 index 00000000..75b5f9ad --- /dev/null +++ b/docs/snippets/mcp-server-card/sendgrid-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/servicenow-card.mdx b/docs/snippets/mcp-server-card/servicenow-card.mdx new file mode 100644 index 00000000..9e5cb5b9 --- /dev/null +++ b/docs/snippets/mcp-server-card/servicenow-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/shopify-card.mdx b/docs/snippets/mcp-server-card/shopify-card.mdx new file mode 100644 index 00000000..f1f9f28c --- /dev/null +++ b/docs/snippets/mcp-server-card/shopify-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/slack-card.mdx b/docs/snippets/mcp-server-card/slack-card.mdx new file mode 100644 index 00000000..df86f795 --- /dev/null +++ b/docs/snippets/mcp-server-card/slack-card.mdx @@ -0,0 +1,7 @@ + + } + href="https://github.com/mcp-server/slack" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/stripe-card.mdx b/docs/snippets/mcp-server-card/stripe-card.mdx new file mode 100644 index 00000000..eb50e6ad --- /dev/null +++ b/docs/snippets/mcp-server-card/stripe-card.mdx @@ -0,0 +1,28 @@ + + + + + } + href="https://github.com/mcp-server/stripe" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/supabase-card.mdx b/docs/snippets/mcp-server-card/supabase-card.mdx new file mode 100644 index 00000000..0fcb73d4 --- /dev/null +++ b/docs/snippets/mcp-server-card/supabase-card.mdx @@ -0,0 +1,16 @@ + + {"Supabase icon"} + + + } + href="https://github.com/mcp-server/supabase" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/tavily-card.mdx b/docs/snippets/mcp-server-card/tavily-card.mdx new file mode 100644 index 00000000..0887a2b8 --- /dev/null +++ b/docs/snippets/mcp-server-card/tavily-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/vercel-card.mdx b/docs/snippets/mcp-server-card/vercel-card.mdx new file mode 100644 index 00000000..4a47bfa1 --- /dev/null +++ b/docs/snippets/mcp-server-card/vercel-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/web-search-card.mdx b/docs/snippets/mcp-server-card/web-search-card.mdx new file mode 100644 index 00000000..143adfe5 --- /dev/null +++ b/docs/snippets/mcp-server-card/web-search-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/whatsapp-card.mdx b/docs/snippets/mcp-server-card/whatsapp-card.mdx new file mode 100644 index 00000000..b94057f2 --- /dev/null +++ b/docs/snippets/mcp-server-card/whatsapp-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/wordpress-card.mdx b/docs/snippets/mcp-server-card/wordpress-card.mdx new file mode 100644 index 00000000..8545c15b --- /dev/null +++ b/docs/snippets/mcp-server-card/wordpress-card.mdx @@ -0,0 +1,17 @@ + + + + + + + + } + href="https://github.com/mcp-server/wordpress" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/youtube-card.mdx b/docs/snippets/mcp-server-card/youtube-card.mdx new file mode 100644 index 00000000..1533f897 --- /dev/null +++ b/docs/snippets/mcp-server-card/youtube-card.mdx @@ -0,0 +1,35 @@ + + + + + } + href="https://github.com/mcp-server/youtube" +> \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/zendesk-card.mdx b/docs/snippets/mcp-server-card/zendesk-card.mdx new file mode 100644 index 00000000..2ea68e6d --- /dev/null +++ b/docs/snippets/mcp-server-card/zendesk-card.mdx @@ -0,0 +1,18 @@ + + + + } + href="https://github.com/mcp-server/zendesk" +> \ No newline at end of file diff --git a/docs/snippets/sdk-card/python-sdk-card.mdx b/docs/snippets/sdk-card/python-sdk-card.mdx new file mode 100644 index 00000000..f27401fe --- /dev/null +++ b/docs/snippets/sdk-card/python-sdk-card.mdx @@ -0,0 +1,4 @@ + \ No newline at end of file diff --git a/docs/snippets/sdk-card/typescript-sdk-card.mdx b/docs/snippets/sdk-card/typescript-sdk-card.mdx new file mode 100644 index 00000000..4614fe64 --- /dev/null +++ b/docs/snippets/sdk-card/typescript-sdk-card.mdx @@ -0,0 +1,4 @@ + \ No newline at end of file diff --git a/docs/snippets/snippet-intro.mdx b/docs/snippets/snippet-intro.mdx new file mode 100644 index 00000000..c57e7c75 --- /dev/null +++ b/docs/snippets/snippet-intro.mdx @@ -0,0 +1,4 @@ +One of the core principles of software development is DRY (Don't Repeat +Yourself). This is a principle that apply to documentation as +well. If you find yourself repeating the same content in multiple places, you +should consider creating a custom snippet to keep your content in sync. diff --git a/examples/claude/Use_Klavis_with_Claude.ipynb b/examples/claude/Use_Klavis_with_Claude.ipynb new file mode 100644 index 00000000..069f3eaa --- /dev/null +++ b/examples/claude/Use_Klavis_with_Claude.ipynb @@ -0,0 +1,409 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/klavis-ai/klavis/blob/main/examples/claude/Use_Klavis_with_Claude.ipynb)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "# Claude + Klavis AI Integration\n", + "\n", + "This tutorial demonstrates how to use Anthropic's Claude with tool use (function calling) with Klavis MCP (Model Context Protocol) servers.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Prerequisites\n", + "\n", + "- **Anthropic API key** - Get at [console.anthropic.com](https://console.anthropic.com/)\n", + "- **Klavis API key** - Get at [klavis.ai](https://klavis.ai/)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# Install the required packages\n", + "%pip install -qU anthropic klavis" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "from anthropic import Anthropic\n", + "from klavis import Klavis\n", + "from klavis.types import McpServerName, ToolFormat\n", + "\n", + "# Set environment variables\n", + "os.environ[\"ANTHROPIC_API_KEY\"] = \"YOUR_ANTHROPIC_API_KEY\" # Replace with your actual Anthropic API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"YOUR_KLAVIS_API_KEY\" # Replace with your actual Klavis API key\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Case Study 1 : Claude + YouTube MCP Server\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 1 - Create YouTube MCP Server using Klavis\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))\n", + "\n", + "youtube_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.YOUTUBE,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "# print(f\"\ud83d\udd17 YouTube MCP server created at: {youtube_mcp_instance.server_url}, and the instance id is {youtube_mcp_instance.instance_id}\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 2 - Create general method to use MCP Server with Claude\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "def claude_with_mcp_server(mcp_server_url: str, user_query: str):\n", + " claude_client = Anthropic(api_key=os.getenv(\"ANTHROPIC_API_KEY\"))\n", + "\n", + " messages = [\n", + " {\"role\": \"user\", \"content\": f\"{user_query}\"}\n", + " ]\n", + " \n", + " mcp_server_tools = klavis_client.mcp_server.list_tools(\n", + " server_url=mcp_server_url,\n", + " format=ToolFormat.ANTHROPIC,\n", + " )\n", + " \n", + " response = claude_client.messages.create(\n", + " model=\"claude-sonnet-4-20250514\",\n", + " max_tokens=4000,\n", + " system=\"You are a helpful assistant. Use the available tools to answer the user's question.\",\n", + " messages=messages,\n", + " tools=mcp_server_tools.tools\n", + " )\n", + " \n", + " messages.append({\"role\": \"assistant\", \"content\": response.content})\n", + "\n", + " if response.stop_reason == \"tool_use\":\n", + " tool_results = []\n", + " \n", + " for content_block in response.content:\n", + " if content_block.type == \"tool_use\":\n", + " function_name = content_block.name\n", + " function_args = content_block.input\n", + " \n", + " print(f\"\ud83d\udd27 Calling: {function_name}, with args: {function_args}\")\n", + " \n", + " result = klavis_client.mcp_server.call_tools(\n", + " server_url=mcp_server_url,\n", + " tool_name=function_name,\n", + " tool_args=function_args,\n", + " )\n", + " \n", + " tool_results.append({\n", + " \"type\": \"tool_result\",\n", + " \"tool_use_id\": content_block.id,\n", + " \"content\": str(result)\n", + " })\n", + " \n", + " messages.append({\"role\": \"user\", \"content\": tool_results})\n", + " \n", + " final_response = claude_client.messages.create(\n", + " model=\"claude-3-5-sonnet-20241022\",\n", + " max_tokens=4000,\n", + " system=\"You are a helpful assistant. Use the available tools to answer the user's question.\",\n", + " messages=messages,\n", + " tools=mcp_server_tools.tools\n", + " )\n", + " \n", + " return final_response.content[0].text\n", + " else:\n", + " return response.content[0].text\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 3 - Summarize your favorite video!\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udd27 Calling: get_youtube_video_transcript, with args: {'url': 'https://www.youtube.com/watch?v=LCEmiRjPEtQ'}\n", + "Based on the video details, I'll provide a comprehensive summary of Andrej Karpathy's keynote speech \"Software Is Changing (Again)\" at AI Startup School. Here's a breakdown by key segments:\n", + "\n", + "1. Introduction and Software Evolution (0:00-04:40)\n", + "- The talk begins with an overview of how software is fundamentally changing\n", + "- Discusses the evolution from Software 1.0 to 3.0\n", + "- Karpathy positions this as a major paradigm shift in software development\n", + "\n", + "2. Programming in English & Software 3.0 (04:40-06:10)\n", + "- Introduces the concept of programming in natural language\n", + "- Explains how LLMs are changing the programming paradigm\n", + "- Highlights the transition to Software 3.0 where English becomes the programming interface\n", + "\n", + "3. LLMs as Computing Infrastructure (06:10-14:39)\n", + "- Discusses three key aspects of LLMs:\n", + " - As utilities\n", + " - As fabrication facilities (fabs)\n", + " - As operating systems\n", + "- Draws historical computing analogies, comparing current state to 1960s computing\n", + "\n", + "4. LLM Psychology (14:39-18:22)\n", + "- Describes LLMs as \"people spirits\"\n", + "- Explores their nature as stochastic simulations of people\n", + "- Discusses their capabilities and limitations\n", + "- Explains how they exhibit both superhuman abilities and human-like fallibilities\n", + "\n", + "5. Practical Applications (18:22-29:06)\n", + "- Covers designing LLM applications with partial autonomy\n", + "- Emphasizes the importance of human-AI collaboration\n", + "- Shares lessons learned from Tesla Autopilot\n", + "- Introduces the concept of \"autonomy sliders\"\n", + "- Uses the Iron Man analogy to explain augmentation vs. agents approach\n", + "\n", + "6. Democratization of Programming (29:06-33:39)\n", + "- Introduces \"Vibe Coding\" concept\n", + "- Explains how LLMs make programming accessible to everyone\n", + "- Discusses the implications of natural language programming\n", + "\n", + "7. Future Outlook (33:39-38:14)\n", + "- Focuses on building for agents\n", + "- Discusses future-ready digital infrastructure\n", + "- Explains how to prepare for the next generation of software\n", + "\n", + "8. Conclusion (38:14-end)\n", + "- Summarizes key points\n", + "- Emphasizes that we're in the early stages (equivalent to 1960s) of LLM development\n", + "- Calls to action for builders and developers\n", + "\n", + "Key Takeaways:\n", + "- Software is undergoing a fundamental transformation with LLMs\n", + "- Natural language is becoming the new programming interface\n", + "- We're in the early stages of this technology, comparable to 1960s computing\n", + "- LLMs have unique psychological properties that influence how we should work with them\n", + "- The future of software development will be more accessible to non-programmers\n", + "- There's a need to build infrastructure that's ready for AI agents\n", + "\n", + "The talk includes several valuable references and links, including Karpathy's original \"Software 2.0\" blog post from 2017 and recent writings on vibe coding and technology diffusion.\n" + ] + } + ], + "source": [ + "YOUTUBE_VIDEO_URL = \"https://www.youtube.com/watch?v=LCEmiRjPEtQ\" # pick a video you like!\n", + "\n", + "result = claude_with_mcp_server(\n", + " mcp_server_url=youtube_mcp_instance.server_url, \n", + " user_query=f\"Please provide a complete summary of this YouTube video with timestamp: {YOUTUBE_VIDEO_URL}\"\n", + ")\n", + "\n", + "print(result)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "\u2705 Great! You've successfully created an AI agent that uses Claude's tool use with Klavis MCP servers to summarize YouTube videos!\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Case Study 2 : Claude + Gmail MCP Server (OAuth needed)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udd10 Opening OAuth authorization for Gmail, if you are not redirected, please open the following URL in your browser: https://api.klavis.ai/oauth/gmail/authorize?instance_id=d9d482b3-433a-4330-9a8b-9548c0b0a326\n" + ] + } + ], + "source": [ + "import webbrowser\n", + "\n", + "gmail_mcp_server = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.GMAIL,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "webbrowser.open(gmail_mcp_server.oauth_url)\n", + "\n", + "print(f\"\ud83d\udd10 Opening OAuth authorization for Gmail, if you are not redirected, please open the following URL in your browser: {gmail_mcp_server.oauth_url}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udd27 Calling: send_email, with args: {'to': ['zihaolin@klavis.ai'], 'subject': 'Test Claude + Gmail MCP Server', 'body': 'Hello World from Claude!'}\n", + "I've sent the email as requested with:\n", + "- To: zihaolin@klavis.ai\n", + "- Subject: Test Claude + Gmail MCP Server\n", + "- Body: Hello World from Claude!\n", + "\n", + "The email was sent successfully. Is there anything else you'd like me to help you with?\n" + ] + } + ], + "source": [ + "EMAIL_RECIPIENT = \"zihaolin@klavis.ai\" # Replace with your email\n", + "EMAIL_SUBJECT = \"Test Claude + Gmail MCP Server\"\n", + "EMAIL_BODY = \"Hello World from Claude!\"\n", + "\n", + "result = claude_with_mcp_server(\n", + " mcp_server_url=gmail_mcp_server.server_url, \n", + " user_query=f\"Please send an email to {EMAIL_RECIPIENT} with subject {EMAIL_SUBJECT} and body {EMAIL_BODY}\"\n", + ")\n", + "\n", + "print(result)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Summary\n", + "\n", + "This tutorial demonstrated how to integrate Anthropic's Claude with tool use capabilities with Klavis MCP servers to create powerful AI applications. We covered two practical examples:\n", + "\n", + "**\ud83c\udfa5 YouTube Integration**: Built an AI assistant that can automatically summarize YouTube videos by extracting transcripts and providing detailed, timestamped summaries.\n", + "\n", + "**\ud83d\udce7 Gmail Integration**: Created an AI-powered email assistant that can send emails through Gmail with OAuth authentication.\n", + "\n", + "### Key Takeaways:\n", + "- **Easy Setup**: Klavis MCP servers can be created with just a few lines of code\n", + "- **Claude Compatible**: All tools are formatted for seamless Claude tool use\n", + "- **Versatile**: Support for both simple APIs (YouTube) and OAuth-authenticated services (Gmail)\n", + "- **Scalable**: The same pattern can be applied to any of the MCP servers available in Klavis\n", + "- **Advanced Reasoning**: Claude's superior reasoning capabilities make it excellent for complex analysis tasks\n", + "\n", + "**Happy building!** \ud83d\ude80\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/examples/crewai/Salesforce_Gmail_CrewAI_Integration.ipynb b/examples/crewai/Salesforce_Gmail_CrewAI_Integration.ipynb new file mode 100644 index 00000000..0323e6c2 --- /dev/null +++ b/examples/crewai/Salesforce_Gmail_CrewAI_Integration.ipynb @@ -0,0 +1,1263 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/klavis-ai/klavis/blob/main/examples/crewai/Salesforce_Gmail_CrewAI_Integration.ipynb)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "# CrewAI with Klavis Salesforce + Gmail MCP Server\n", + "\n", + "This tutorial demonstrates how to create a powerful AI crew that manages Salesforce opportunities and sends follow-up emails through Gmail using CrewAI with Klavis MCP servers.\n", + "\n", + "## Use Case\n", + "- **Salesforce Agent**: Finds opportunities with pending next steps\n", + "- **Email Agent**: Drafts professional follow-up emails based on Salesforce data\n", + "- **Workflow**: Automated opportunity management and email communication\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Prerequisites\n", + "\n", + "- **OpenAI API key** - Get at [openai.com](https://openai.com/)\n", + "- **Klavis API key** - Get at [klavis.ai](https://klavis.ai/)\n", + "- **Salesforce Account**\n", + "- **Gmail Account**\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# Install the required packages\n", + "%pip install -qU crewai 'crewai-tools[mcp]' klavis" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = \"YOUR_OPENAI_API_KEY\" # Replace with your actual OpenAI API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"YOUR_KLAVIS_API_KEY\" # Replace with your actual Klavis API key\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Step 1: Initialize Klavis Client\n", + "\n", + "First, let's set up the Klavis client to create our MCP server instances.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u2705 Klavis client initialized successfully\n" + ] + } + ], + "source": [ + "from klavis import Klavis\n", + "\n", + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Step 2: Create Salesforce MCP Server\n", + "\n", + "Create a Salesforce MCP server instance and complete OAuth authentication.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udd10 Opening OAuth authorization for Salesforce...\n" + ] + } + ], + "source": [ + "import webbrowser\n", + "from klavis.types import McpServerName\n", + "\n", + "# Create Salesforce MCP Server instance\n", + "salesforce_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.SALESFORCE,\n", + " user_id=\"1234\", \n", + ")\n", + "# Open OAuth URL in browser\n", + "webbrowser.open(salesforce_mcp_instance.oauth_url)\n", + "\n", + "print(f\"\ud83d\udd10 Opening OAuth authorization for Salesforce...\")\n", + "# print(f\"\ud83d\udcf1 If not redirected automatically, open: {salesforce_mcp_instance.oauth_url}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Step 3: Create Gmail MCP Server\n", + "\n", + "Create a Gmail MCP server instance and complete OAuth authentication.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 37, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Create Gmail MCP Server instance\n", + "gmail_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.GMAIL,\n", + " user_id=\"1234\",\n", + ")\n", + "# Open OAuth URL in browser\n", + "webbrowser.open(gmail_mcp_instance.oauth_url)\n", + "\n", + "# print(f\"\ud83d\udd17 Gmail MCP server created at: {gmail_mcp_instance.server_url}\")\n", + "# print(f\"\ud83d\udd10 Opening OAuth authorization for Gmail...\")\n", + "# print(f\"\ud83d\udcf1 If not redirected automatically, open: {gmail_mcp_instance.oauth_url}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Step 4: Configure MCP Server Adapters\n", + "\n", + "Set up the MCP server adapters for CrewAI integration.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/zihaolin/src/klavis/.venv/lib/python3.13/site-packages/pydantic/fields.py:1093: PydanticDeprecatedSince20: Using extra keyword arguments on `Field` is deprecated and will be removed. Use `json_schema_extra` instead. (Extra keys: 'items', 'anyOf', 'enum', 'properties'). Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.11/migration/\n", + " warn(\n" + ] + } + ], + "source": [ + "from crewai_tools import MCPServerAdapter\n", + "# Configure MCP server adapters\n", + "salesforce_mcp_server = MCPServerAdapter({\n", + " \"url\": salesforce_mcp_instance.server_url,\n", + " \"transport\": \"streamable-http\"\n", + "})\n", + "\n", + "gmail_mcp_server = MCPServerAdapter({\n", + " \"url\": gmail_mcp_instance.server_url,\n", + " \"transport\": \"streamable-http\"\n", + "})\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Step 5: Create CrewAI Agents and Tasks\n", + "\n", + "Define specialized agents for Salesforce opportunity management and email communication. Create specific tasks for opportunity analysis and email drafting." + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": {}, + "outputs": [], + "source": [ + "from crewai import Agent, Task, Crew, Process\n", + "# Create Salesforce Opportunity Analyst Agent\n", + "salesforce_agent = Agent(\n", + " role=\"Salesforce Opportunity Analyst\",\n", + " goal=\"Find opportunities with pending next steps and extract relevant contact information\",\n", + " backstory=\"You are an expert at analyzing Salesforce data to identify opportunities that need follow-up actions. You excel at extracting opportunity details, next step information, and contact data for effective sales management.\",\n", + " tools=salesforce_mcp_server.tools, \n", + " verbose=False,\n", + ")\n", + "\n", + "# Create Email Communication Specialist Agent\n", + "email_agent = Agent(\n", + " role=\"Email Communication Specialist\",\n", + " goal=\"Draft professional follow-up emails based on Salesforce opportunity next steps\",\n", + " backstory=\"You are an expert at crafting complete, professional follow-up emails without placeholder content. You always write proper signatures and complete email content based on Salesforce opportunity data and next step requirements.\",\n", + " tools=gmail_mcp_server.tools, \n", + " verbose=False,\n", + ")\n", + "\n", + "# Define Salesforce Analysis Task\n", + "salesforce_task = Task(\n", + " description=\"Find CrewAI opportunitity with next steps. Extract opportunity name, next step details, and contact emails. Focus on opportunities that require immediate follow-up actions.\",\n", + " expected_output=\"Comprehensive list of opportunities with next steps and contact information, including opportunity names, next step descriptions, contact emails, and priority levels\",\n", + " agent=salesforce_agent\n", + ")\n", + "\n", + "# Define Email Drafting Task\n", + "email_task = Task(\n", + " description=\"Based on the Salesforce opportunities analysis, draft professional follow-up emails for each opportunity. Include opportunity context, clear next actions, and professional tone. Ensure emails are complete and ready for sending.\",\n", + " expected_output=\"Draft emails with complete content ready for review and sending, including subject lines, professional greetings, opportunity context, next steps, and proper signatures\",\n", + " agent=email_agent,\n", + " context=[salesforce_task] # This task depends on the salesforce_task output\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Step 6: Execute the Crew\n", + "\n", + "Create and run the multi-service crew to analyze Salesforce opportunities and draft follow-up emails.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 Crew Execution Started \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Crew Execution Started                                                                                         \u2502\n",
+       "\u2502  Name: crew                                                                                                     \u2502\n",
+       "\u2502  ID: 6f02408e-8092-49b9-b1ed-fdce83e0a944                                                                       \u2502\n",
+       "\u2502  Tool Args:                                                                                                     \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[36m\u256d\u2500\u001b[0m\u001b[36m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[36m Crew Execution Started \u001b[0m\u001b[36m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[36m\u2500\u256e\u001b[0m\n", + "\u001b[36m\u2502\u001b[0m \u001b[36m\u2502\u001b[0m\n", + "\u001b[36m\u2502\u001b[0m \u001b[1;36mCrew Execution Started\u001b[0m \u001b[36m\u2502\u001b[0m\n", + "\u001b[36m\u2502\u001b[0m \u001b[37mName: \u001b[0m\u001b[36mcrew\u001b[0m \u001b[36m\u2502\u001b[0m\n", + "\u001b[36m\u2502\u001b[0m \u001b[37mID: \u001b[0m\u001b[36m6f02408e-8092-49b9-b1ed-fdce83e0a944\u001b[0m \u001b[36m\u2502\u001b[0m\n", + "\u001b[36m\u2502\u001b[0m \u001b[37mTool Args: \u001b[0m \u001b[36m\u2502\u001b[0m\n", + "\u001b[36m\u2502\u001b[0m \u001b[36m\u2502\u001b[0m\n", + "\u001b[36m\u2502\u001b[0m \u001b[36m\u2502\u001b[0m\n", + "\u001b[36m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n",
+       "
\n" + ], + "text/plain": [ + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n"
+      ],
+      "text/plain": []
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/html": [
+       "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \ud83e\udd16 Agent Started \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Agent: Salesforce Opportunity Analyst                                                                          \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Task: Find CrewAI opportunitity with next steps. Extract opportunity name, next step details, and contact      \u2502\n",
+       "\u2502  emails. Focus on opportunities that require immediate follow-up actions.                                       \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[35m\u256d\u2500\u001b[0m\u001b[35m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[35m \ud83e\udd16 Agent Started \u001b[0m\u001b[35m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[35m\u2500\u256e\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mAgent: \u001b[0m\u001b[1;92mSalesforce Opportunity Analyst\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mTask: \u001b[0m\u001b[92mFind CrewAI opportunitity with next steps. Extract opportunity name, next step details, and contact \u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[92memails. Focus on opportunities that require immediate follow-up actions.\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n",
+       "
\n" + ], + "text/plain": [ + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
/Users/zihaolin/src/klavis/.venv/lib/python3.13/site-packages/rich/live.py:231: UserWarning: install \"ipywidgets\" \n",
+       "for Jupyter support\n",
+       "  warnings.warn('install \"ipywidgets\" for Jupyter support')\n",
+       "
\n" + ], + "text/plain": [ + "/Users/zihaolin/src/klavis/.venv/lib/python3.13/site-packages/rich/live.py:231: UserWarning: install \"ipywidgets\" \n", + "for Jupyter support\n", + " warnings.warn('install \"ipywidgets\" for Jupyter support')\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n"
+      ],
+      "text/plain": []
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/html": [
+       "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \ud83d\udd27 Agent Tool Execution \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Agent: Salesforce Opportunity Analyst                                                                          \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Thought: Action: salesforce_get_opportunities                                                                  \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Using Tool: salesforce_get_opportunities                                                                       \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[35m\u256d\u2500\u001b[0m\u001b[35m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[35m \ud83d\udd27 Agent Tool Execution \u001b[0m\u001b[35m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[35m\u2500\u256e\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mAgent: \u001b[0m\u001b[1;92mSalesforce Opportunity Analyst\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mThought: \u001b[0m\u001b[92mAction: salesforce_get_opportunities\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mUsing Tool: \u001b[0m\u001b[1;92msalesforce_get_opportunities\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 Tool Input \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  \"{\\\"name_contains\\\": \\\"CrewAI\\\", \\\"fields\\\": [\\\"Name\\\", \\\"NextStep\\\", \\\"AccountId\\\"]}\"                         \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[34m\u256d\u2500\u001b[0m\u001b[34m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[34m Tool Input \u001b[0m\u001b[34m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[34m\u2500\u256e\u001b[0m\n", + "\u001b[34m\u2502\u001b[0m \u001b[34m\u2502\u001b[0m\n", + "\u001b[34m\u2502\u001b[0m \u001b[38;2;230;219;116;49m\"{\\\"name_contains\\\": \\\"CrewAI\\\", \\\"fields\\\": [\\\"Name\\\", \\\"NextStep\\\", \\\"AccountId\\\"]}\"\u001b[0m \u001b[34m\u2502\u001b[0m\n", + "\u001b[34m\u2502\u001b[0m \u001b[34m\u2502\u001b[0m\n", + "\u001b[34m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 Tool Output \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  {                                                                                                              \u2502\n",
+       "\u2502    \"totalSize\": 1,                                                                                              \u2502\n",
+       "\u2502    \"done\": true,                                                                                                \u2502\n",
+       "\u2502    \"records\": [                                                                                                 \u2502\n",
+       "\u2502      {                                                                                                          \u2502\n",
+       "\u2502        \"attributes\": {                                                                                          \u2502\n",
+       "\u2502          \"type\": \"Opportunity\",                                                                                 \u2502\n",
+       "\u2502          \"url\": \"/services/data/v59.0/sobjects/Opportunity/006fK000005jXTFQA2\"                                  \u2502\n",
+       "\u2502        },                                                                                                       \u2502\n",
+       "\u2502        \"Name\": \"CrewAI <> Klavis\",                                                                              \u2502\n",
+       "\u2502        \"NextStep\": \"schedule a meeting on 06/30 9AM PST\",                                                       \u2502\n",
+       "\u2502        \"AccountId\": \"001fK00000IBPlXQAX\"                                                                        \u2502\n",
+       "\u2502      }                                                                                                          \u2502\n",
+       "\u2502    ]                                                                                                            \u2502\n",
+       "\u2502  }                                                                                                              \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[32m\u256d\u2500\u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m Tool Output \u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m\u2500\u256e\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m{\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"totalSize\": 1,\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"done\": true,\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"records\": [\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m {\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"attributes\": {\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"type\": \"Opportunity\",\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"url\": \"/services/data/v59.0/sobjects/Opportunity/006fK000005jXTFQA2\"\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m },\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"Name\": \"CrewAI <> Klavis\",\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"NextStep\": \"schedule a meeting on 06/30 9AM PST\",\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"AccountId\": \"001fK00000IBPlXQAX\"\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m }\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m ]\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m}\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n",
+       "
\n" + ], + "text/plain": [ + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
/Users/zihaolin/src/klavis/.venv/lib/python3.13/site-packages/rich/live.py:231: UserWarning: install \"ipywidgets\" \n",
+       "for Jupyter support\n",
+       "  warnings.warn('install \"ipywidgets\" for Jupyter support')\n",
+       "
\n" + ], + "text/plain": [ + "/Users/zihaolin/src/klavis/.venv/lib/python3.13/site-packages/rich/live.py:231: UserWarning: install \"ipywidgets\" \n", + "for Jupyter support\n", + " warnings.warn('install \"ipywidgets\" for Jupyter support')\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n"
+      ],
+      "text/plain": []
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/html": [
+       "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \ud83d\udd27 Agent Tool Execution \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Agent: Salesforce Opportunity Analyst                                                                          \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Thought: Action: salesforce_get_contacts                                                                       \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Using Tool: salesforce_get_contacts                                                                            \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[35m\u256d\u2500\u001b[0m\u001b[35m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[35m \ud83d\udd27 Agent Tool Execution \u001b[0m\u001b[35m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[35m\u2500\u256e\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mAgent: \u001b[0m\u001b[1;92mSalesforce Opportunity Analyst\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mThought: \u001b[0m\u001b[92mAction: salesforce_get_contacts\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mUsing Tool: \u001b[0m\u001b[1;92msalesforce_get_contacts\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 Tool Input \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  \"{\\\"account_id\\\": \\\"001fK00000IBPlXQAX\\\", \\\"fields\\\": [\\\"Email\\\"], \\\"limit\\\": 50}\"                             \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[34m\u256d\u2500\u001b[0m\u001b[34m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[34m Tool Input \u001b[0m\u001b[34m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[34m\u2500\u256e\u001b[0m\n", + "\u001b[34m\u2502\u001b[0m \u001b[34m\u2502\u001b[0m\n", + "\u001b[34m\u2502\u001b[0m \u001b[38;2;230;219;116;49m\"{\\\"account_id\\\": \\\"001fK00000IBPlXQAX\\\", \\\"fields\\\": [\\\"Email\\\"], \\\"limit\\\": 50}\"\u001b[0m \u001b[34m\u2502\u001b[0m\n", + "\u001b[34m\u2502\u001b[0m \u001b[34m\u2502\u001b[0m\n", + "\u001b[34m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 Tool Output \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  {                                                                                                              \u2502\n",
+       "\u2502    \"totalSize\": 1,                                                                                              \u2502\n",
+       "\u2502    \"done\": true,                                                                                                \u2502\n",
+       "\u2502    \"records\": [                                                                                                 \u2502\n",
+       "\u2502      {                                                                                                          \u2502\n",
+       "\u2502        \"attributes\": {                                                                                          \u2502\n",
+       "\u2502          \"type\": \"Contact\",                                                                                     \u2502\n",
+       "\u2502          \"url\": \"/services/data/v59.0/sobjects/Contact/003fK000009uHNdQAM\"                                      \u2502\n",
+       "\u2502        },                                                                                                       \u2502\n",
+       "\u2502        \"Email\": \"joaomdmoura@crewai.com\"                                                                        \u2502\n",
+       "\u2502      }                                                                                                          \u2502\n",
+       "\u2502    ]                                                                                                            \u2502\n",
+       "\u2502  }                                                                                                              \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[32m\u256d\u2500\u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m Tool Output \u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m\u2500\u256e\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m{\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"totalSize\": 1,\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"done\": true,\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"records\": [\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m {\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"attributes\": {\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"type\": \"Contact\",\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"url\": \"/services/data/v59.0/sobjects/Contact/003fK000009uHNdQAM\"\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m },\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"Email\": \"joaomdmoura@crewai.com\"\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m }\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m ]\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m}\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n",
+       "
\n" + ], + "text/plain": [ + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n"
+      ],
+      "text/plain": []
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/html": [
+       "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \u2705 Agent Final Answer \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Agent: Salesforce Opportunity Analyst                                                                          \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Final Answer:                                                                                                  \u2502\n",
+       "\u2502  {                                                                                                              \u2502\n",
+       "\u2502    \"Opportunities\": [                                                                                           \u2502\n",
+       "\u2502      {                                                                                                          \u2502\n",
+       "\u2502        \"OpportunityName\": \"CrewAI <> Klavis\",                                                                   \u2502\n",
+       "\u2502        \"NextStep\": \"schedule a meeting on 06/30 9AM PST\",                                                       \u2502\n",
+       "\u2502        \"ContactEmail\": \"joaomdmoura@crewai.com\"                                                                 \u2502\n",
+       "\u2502      }                                                                                                          \u2502\n",
+       "\u2502    ]                                                                                                            \u2502\n",
+       "\u2502  }                                                                                                              \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[32m\u256d\u2500\u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m \u2705 Agent Final Answer \u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m\u2500\u256e\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mAgent: \u001b[0m\u001b[1;92mSalesforce Opportunity Analyst\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mFinal Answer:\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m{\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"Opportunities\": [\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m {\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"OpportunityName\": \"CrewAI <> Klavis\",\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"NextStep\": \"schedule a meeting on 06/30 9AM PST\",\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m \"ContactEmail\": \"joaomdmoura@crewai.com\"\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m }\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m ]\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m}\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n",
+       "
\n" + ], + "text/plain": [ + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n"
+      ],
+      "text/plain": []
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/html": [
+       "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 Task Completion \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Task Completed                                                                                                 \u2502\n",
+       "\u2502  Name: 3fd10914-4c4a-4da2-9375-d18a069ecc55                                                                     \u2502\n",
+       "\u2502  Agent: Salesforce Opportunity Analyst                                                                          \u2502\n",
+       "\u2502  Tool Args:                                                                                                     \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[32m\u256d\u2500\u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m Task Completion \u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m\u2500\u256e\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[1;32mTask Completed\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mName: \u001b[0m\u001b[32m3fd10914-4c4a-4da2-9375-d18a069ecc55\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mAgent: \u001b[0m\u001b[32mSalesforce Opportunity Analyst\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mTool Args: \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n",
+       "
\n" + ], + "text/plain": [ + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n"
+      ],
+      "text/plain": []
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/html": [
+       "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \ud83e\udd16 Agent Started \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Agent: Email Communication Specialist                                                                          \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Task: Based on the Salesforce opportunities analysis, draft professional follow-up emails for each             \u2502\n",
+       "\u2502  opportunity. Include opportunity context, clear next actions, and professional tone. Ensure emails are         \u2502\n",
+       "\u2502  complete and ready for sending.                                                                                \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[35m\u256d\u2500\u001b[0m\u001b[35m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[35m \ud83e\udd16 Agent Started \u001b[0m\u001b[35m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[35m\u2500\u256e\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mAgent: \u001b[0m\u001b[1;92mEmail Communication Specialist\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mTask: \u001b[0m\u001b[92mBased on the Salesforce opportunities analysis, draft professional follow-up emails for each \u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[92mopportunity. Include opportunity context, clear next actions, and professional tone. Ensure emails are \u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[92mcomplete and ready for sending.\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n",
+       "
\n" + ], + "text/plain": [ + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
/Users/zihaolin/src/klavis/.venv/lib/python3.13/site-packages/rich/live.py:231: UserWarning: install \"ipywidgets\" \n",
+       "for Jupyter support\n",
+       "  warnings.warn('install \"ipywidgets\" for Jupyter support')\n",
+       "
\n" + ], + "text/plain": [ + "/Users/zihaolin/src/klavis/.venv/lib/python3.13/site-packages/rich/live.py:231: UserWarning: install \"ipywidgets\" \n", + "for Jupyter support\n", + " warnings.warn('install \"ipywidgets\" for Jupyter support')\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n"
+      ],
+      "text/plain": []
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/html": [
+       "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \ud83d\udd27 Agent Tool Execution \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Agent: Email Communication Specialist                                                                          \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Thought: I need to draft a professional follow-up email for the opportunity \"CrewAI <> Klavis\" that includes   \u2502\n",
+       "\u2502  the opportunity context, the next step which is scheduling a meeting, and ensure it has a proper greeting,     \u2502\n",
+       "\u2502  subject line, and signature.                                                                                   \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Using Tool: draft_email                                                                                        \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[35m\u256d\u2500\u001b[0m\u001b[35m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[35m \ud83d\udd27 Agent Tool Execution \u001b[0m\u001b[35m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[35m\u2500\u256e\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mAgent: \u001b[0m\u001b[1;92mEmail Communication Specialist\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mThought: \u001b[0m\u001b[92mI need to draft a professional follow-up email for the opportunity \"CrewAI <> Klavis\" that includes \u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[92mthe opportunity context, the next step which is scheduling a meeting, and ensure it has a proper greeting, \u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[92msubject line, and signature.\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[37mUsing Tool: \u001b[0m\u001b[1;92mdraft_email\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2502\u001b[0m \u001b[35m\u2502\u001b[0m\n", + "\u001b[35m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 Tool Input \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  \"{\\\"to\\\": [\\\"joaomdmoura@crewai.com\\\"], \\\"subject\\\": \\\"Follow-Up: Meeting Scheduling for CrewAI <> Klavis Opp  \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[34m\u256d\u2500\u001b[0m\u001b[34m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[34m Tool Input \u001b[0m\u001b[34m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[34m\u2500\u256e\u001b[0m\n", + "\u001b[34m\u2502\u001b[0m \u001b[34m\u2502\u001b[0m\n", + "\u001b[34m\u2502\u001b[0m \u001b[38;2;230;219;116;49m\"{\\\"to\\\": [\\\"joaomdmoura@crewai.com\\\"], \\\"subject\\\": \\\"Follow-Up: Meeting Scheduling for CrewAI <> Klavis Opp\u001b[0m \u001b[34m\u2502\u001b[0m\n", + "\u001b[34m\u2502\u001b[0m \u001b[34m\u2502\u001b[0m\n", + "\u001b[34m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 Tool Output \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Email draft created successfully with ID: r-6581095388079430028                                                \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[32m\u256d\u2500\u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m Tool Output \u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m\u2500\u256e\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92mEmail draft created successfully with ID: r-6581095388079430028\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n",
+       "
\n" + ], + "text/plain": [ + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n"
+      ],
+      "text/plain": []
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/html": [
+       "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \u2705 Agent Final Answer \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Agent: Email Communication Specialist                                                                          \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Final Answer:                                                                                                  \u2502\n",
+       "\u2502  Subject: Follow-Up: Meeting Scheduling for CrewAI <> Klavis Opportunity                                        \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Dear Jo\u00e3o Moura,                                                                                               \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  I hope this message finds you well. I am following up regarding our ongoing collaboration on the CrewAI <>     \u2502\n",
+       "\u2502  Klavis opportunity. I would like to schedule a meeting to discuss the next steps and further align our         \u2502\n",
+       "\u2502  efforts.                                                                                                       \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Could we schedule the meeting for June 30th at 9 AM PST? Please let me know if this time works for you or if   \u2502\n",
+       "\u2502  there are any other slots that you would prefer.                                                               \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Looking forward to your reply.                                                                                 \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Best regards,                                                                                                  \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  [Your Name]                                                                                                    \u2502\n",
+       "\u2502  [Your Position]                                                                                                \u2502\n",
+       "\u2502  [Your Company]                                                                                                 \u2502\n",
+       "\u2502  [Your Phone Number]                                                                                            \u2502\n",
+       "\u2502  [Your Email Address]                                                                                           \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[32m\u256d\u2500\u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m \u2705 Agent Final Answer \u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m\u2500\u256e\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mAgent: \u001b[0m\u001b[1;92mEmail Communication Specialist\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mFinal Answer:\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92mSubject: Follow-Up: Meeting Scheduling for CrewAI <> Klavis Opportunity\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92mDear Jo\u00e3o Moura,\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92mI hope this message finds you well. I am following up regarding our ongoing collaboration on the CrewAI <> \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92mKlavis opportunity. I would like to schedule a meeting to discuss the next steps and further align our \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92mefforts.\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92mCould we schedule the meeting for June 30th at 9 AM PST? Please let me know if this time works for you or if \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92mthere are any other slots that you would prefer.\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92mLooking forward to your reply.\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92mBest regards,\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m[Your Name] \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m[Your Position] \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m[Your Company] \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m[Your Phone Number] \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[92m[Your Email Address]\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n",
+       "
\n" + ], + "text/plain": [ + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n"
+      ],
+      "text/plain": []
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/html": [
+       "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 Task Completion \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Task Completed                                                                                                 \u2502\n",
+       "\u2502  Name: f80c6d70-1715-4a60-ada0-2f7726d6b727                                                                     \u2502\n",
+       "\u2502  Agent: Email Communication Specialist                                                                          \u2502\n",
+       "\u2502  Tool Args:                                                                                                     \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[32m\u256d\u2500\u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m Task Completion \u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m\u2500\u256e\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[1;32mTask Completed\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mName: \u001b[0m\u001b[32mf80c6d70-1715-4a60-ada0-2f7726d6b727\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mAgent: \u001b[0m\u001b[32mEmail Communication Specialist\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mTool Args: \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n",
+       "
\n" + ], + "text/plain": [ + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 Crew Completion \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Crew Execution Completed                                                                                       \u2502\n",
+       "\u2502  Name: crew                                                                                                     \u2502\n",
+       "\u2502  ID: 6f02408e-8092-49b9-b1ed-fdce83e0a944                                                                       \u2502\n",
+       "\u2502  Tool Args:                                                                                                     \u2502\n",
+       "\u2502  Final Output: Subject: Follow-Up: Meeting Scheduling for CrewAI <> Klavis Opportunity                          \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Dear Jo\u00e3o Moura,                                                                                               \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  I hope this message finds you well. I am following up regarding our ongoing collaboration on the CrewAI <>     \u2502\n",
+       "\u2502  Klavis opportunity. I would like to schedule a meeting to discuss the next steps and further align our         \u2502\n",
+       "\u2502  efforts.                                                                                                       \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Could we schedule the meeting for June 30th at 9 AM PST? Please let me know if this time works for you or if   \u2502\n",
+       "\u2502  there are any other slots that you would prefer.                                                               \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Looking forward to your reply.                                                                                 \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  Best regards,                                                                                                  \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502  [Your Name]                                                                                                    \u2502\n",
+       "\u2502  [Your Position]                                                                                                \u2502\n",
+       "\u2502  [Your Company]                                                                                                 \u2502\n",
+       "\u2502  [Your Phone Number]                                                                                            \u2502\n",
+       "\u2502  [Your Email Address]                                                                                           \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2502                                                                                                                 \u2502\n",
+       "\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[32m\u256d\u2500\u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m Crew Completion \u001b[0m\u001b[32m\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u001b[0m\u001b[32m\u2500\u256e\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[1;32mCrew Execution Completed\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mName: \u001b[0m\u001b[32mcrew\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mID: \u001b[0m\u001b[32m6f02408e-8092-49b9-b1ed-fdce83e0a944\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mTool Args: \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mFinal Output: Subject: Follow-Up: Meeting Scheduling for CrewAI <> Klavis Opportunity\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mDear Jo\u00e3o Moura,\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mI hope this message finds you well. I am following up regarding our ongoing collaboration on the CrewAI <> \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mKlavis opportunity. I would like to schedule a meeting to discuss the next steps and further align our \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mefforts.\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mCould we schedule the meeting for June 30th at 9 AM PST? Please let me know if this time works for you or if \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mthere are any other slots that you would prefer.\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mLooking forward to your reply.\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37mBest regards,\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37m[Your Name] \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37m[Your Position] \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37m[Your Company] \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37m[Your Phone Number] \u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[37m[Your Email Address]\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2502\u001b[0m \u001b[32m\u2502\u001b[0m\n", + "\u001b[32m\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n",
+       "
\n" + ], + "text/plain": [ + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Resources cleaned up successfully\n" + ] + } + ], + "source": [ + "try:\n", + " # Create the crew\n", + " salesforce_gmail_crew = Crew(\n", + " agents=[salesforce_agent, email_agent],\n", + " tasks=[salesforce_task, email_task],\n", + " process=Process.sequential,\n", + " verbose=True, # Set to False to reduce output\n", + " )\n", + " # Execute the crew\n", + " salesforce_gmail_crew.kickoff()\n", + " \n", + "except Exception as e:\n", + " print(f\"\u274c Error during crew execution: {e}\")\n", + " \n", + "finally:\n", + " # Clean up resources\n", + " salesforce_mcp_server.stop()\n", + " gmail_mcp_server.stop()\n", + " print(\"Resources cleaned up successfully\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Summary\n", + "\n", + "This notebook demonstrated how to create a powerful Salesforce + Gmail integration using CrewAI with Klavis MCP servers.\n", + "\n", + "### \ud83c\udfaf **What We Accomplished:**\n", + "\n", + "1. **Multi-Service Integration**: Connected Salesforce and Gmail through MCP servers\n", + "2. **Specialized Agents**: Created focused agents for opportunity analysis and email communication\n", + "3. **Sequential Workflow**: Implemented a workflow where Salesforce analysis feeds into email drafting\n", + "4. **OAuth Authentication**: Handled secure authentication for both services\n", + "5. **Professional Output**: Generated actionable insights and ready-to-send emails\n", + "\n", + "### \ud83c\udf93 **Next Steps:**\n", + "- Customize agent personalities and instructions for your use case\n", + "- Add more MCP servers for comprehensive workflow automation\n", + "- Implement scheduling and monitoring for production deployment\n", + "- Explore advanced CrewAI features like memory and planning\n", + "\n", + "**Happy building!** \ud83d\ude80\ud83d\udcca\ud83d\udce7\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/examples/crewai/Use_Klavis_with_CrewAI.ipynb b/examples/crewai/Use_Klavis_with_CrewAI.ipynb new file mode 100644 index 00000000..8de3f967 --- /dev/null +++ b/examples/crewai/Use_Klavis_with_CrewAI.ipynb @@ -0,0 +1,705 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/klavis-ai/klavis/blob/main/examples/crewai/Use_Klavis_with_CrewAI.ipynb)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "# CrewAI + Klavis AI Integration\n", + "\n", + "This tutorial demonstrates how to use CrewAI with Klavis MCP (Model Context Protocol) servers to create powerful AI agent crews with access to external tools and services.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Prerequisites\n", + "\n", + "- **OpenAI API key** - Get at [openai.com](https://openai.com/)\n", + "- **Klavis API key** - Get at [klavis.ai](https://klavis.ai/)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# Install the required packages\n", + "%pip install -qU crewai 'crewai-tools[mcp]' klavis openai" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "from crewai import Agent, Task, Crew, Process\n", + "from crewai_tools import MCPServerAdapter\n", + "from klavis import Klavis\n", + "from klavis.types import McpServerName\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = \"YOUR_OPENAI_API_KEY\" # Replace with your actual OpenAI API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"YOUR_KLAVIS_API_KEY\" # Replace with your actual Klavis API key\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Case Study 1: YouTube Research Crew\n", + "\n", + "Let's create a CrewAI crew that can research YouTube videos using Klavis MCP servers.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 1 - Create YouTube MCP Server using Klavis\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udd17 YouTube MCP server created at: https://youtube-mcp-server.klavis.ai/mcp/?instance_id=270cbd51-e737-407d-85ce-6e6162248671\n", + "\ud83d\udcdd Instance ID: 270cbd51-e737-407d-85ce-6e6162248671\n" + ] + } + ], + "source": [ + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))\n", + "\n", + "youtube_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.YOUTUBE,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "print(f\"\ud83d\udd17 YouTube MCP server created at: {youtube_mcp_instance.server_url}\")\n", + "print(f\"\ud83d\udcdd Instance ID: {youtube_mcp_instance.instance_id}\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 2 - Create CrewAI Agents with MCP Tools\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/zihaolin/src/klavis/.venv/lib/python3.13/site-packages/pydantic/fields.py:1093: PydanticDeprecatedSince20: Using extra keyword arguments on `Field` is deprecated and will be removed. Use `json_schema_extra` instead. (Extra keys: 'items', 'anyOf', 'enum', 'properties'). Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.11/migration/\n", + " warn(\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u2705 Available tools from YouTube MCP server: ['get_youtube_video_transcript']\n" + ] + }, + { + "data": { + "text/html": [ + "
/Users/zihaolin/src/klavis/.venv/lib/python3.13/site-packages/rich/live.py:231: UserWarning: install \"ipywidgets\" \n",
+       "for Jupyter support\n",
+       "  warnings.warn('install \"ipywidgets\" for Jupyter support')\n",
+       "
\n" + ], + "text/plain": [ + "/Users/zihaolin/src/klavis/.venv/lib/python3.13/site-packages/rich/live.py:231: UserWarning: install \"ipywidgets\" \n", + "for Jupyter support\n", + " warnings.warn('install \"ipywidgets\" for Jupyter support')\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
```\n",
+       "# YouTube Video Analysis: Andrej Karpathy - Software Is Changing (Again)\n",
+       "\n",
+       "## Video Details\n",
+       "- **Title:** Andrej Karpathy: Software Is Changing (Again)\n",
+       "- **Channel:** Y Combinator\n",
+       "- **Published On:** June 19, 2025\n",
+       "- **Duration:** 39:32\n",
+       "- **View Count:** 1,035,413\n",
+       "- **Like Count:** 33,783\n",
+       "- **Comment Count:** 750\n",
+       "- **Thumbnail:** ![Thumbnail](https://i.ytimg.com/vi/LCEmiRjPEtQ/hqdefault.jpg)\n",
+       "\n",
+       "## Key Insights Summary\n",
+       "\n",
+       "### Introduction\n",
+       "- **[00:00 - Intro]**  \n",
+       "  Andrej Karpathy introduces the topic of the keynote, discussing the evolution of software and the significance of\n",
+       "new technological paradigms.\n",
+       "\n",
+       "### Software Evolution: From 1.0 to 3.0\n",
+       "- **[01:25 - Software evolution: From 1.0 to 3.0]**  \n",
+       "  Karpathy explains the major shifts in software development, highlighting that we are transitioning into \"Software\n",
+       "3.0,\" where natural language processing (NLP) takes center stage.\n",
+       "\n",
+       "### Rise of Software 3.0\n",
+       "- **[04:40 - Programming in English: Rise of Software 3.0]**  \n",
+       "  In this segment, he emphasizes how the use of language models (LLMs) allows programmers to interact with software\n",
+       "using English, democratizing programming.\n",
+       "\n",
+       "### LLMs as Utilities\n",
+       "- **[06:10 - LLMs as utilities, fabs, and operating systems]**  \n",
+       "  Karpathy compares LLMs to utilities and operating systems, proposing that they will be foundational to future \n",
+       "software infrastructure.\n",
+       "\n",
+       "### New LLM OS\n",
+       "- **[11:04 - The new LLM OS and historical computing analogies]**  \n",
+       "  He discusses how current developments resemble the 1960s in computing history, pointing out the potential for new\n",
+       "forms of computing to emerge.\n",
+       "\n",
+       "### Psychology of LLMs\n",
+       "- **[14:39 - Psychology of LLMs: People spirits and cognitive quirks]**  \n",
+       "  The discussion shifts to the psychological aspects of LLMs, referring to them as \"people spirits\" that simulate \n",
+       "human-like capabilities while retaining unique flaws.\n",
+       "\n",
+       "### Designing LLM Apps\n",
+       "- **[18:22 - Designing LLM apps with partial autonomy]**  \n",
+       "  Karpathy suggests that LLM technology opens avenues for developing applications with varying degrees of autonomy.\n",
+       "\n",
+       "### Human-AI Collaboration\n",
+       "- **[23:40 - The importance of human-AI collaboration loops]**  \n",
+       "  Emphasizing the need for effective collaboration between humans and AI applications, he outlines strategies for \n",
+       "fostering productive interactions.\n",
+       "\n",
+       "### Lessons from Tesla Autopilot\n",
+       "- **[26:00 - Lessons from Tesla Autopilot & autonomy sliders]**  \n",
+       "  He shares lessons learned from Tesla\u2019s Autopilot system, detailing the implications for software development and \n",
+       "user experience.\n",
+       "\n",
+       "### The Iron Man Analogy\n",
+       "- **[27:52 - The Iron Man analogy: Augmentation vs. agents]**  \n",
+       "  The comparison of LLMs to Iron Man's suit illustrates the concept of augmentation, emphasizing the potential for \n",
+       "enhanced human capabilities.\n",
+       "\n",
+       "### Vibe Coding\n",
+       "- **[29:06 - Vibe Coding: Everyone is now a programmer]**  \n",
+       "  Karpathy explains 'vibe coding,' a concept where increased accessibility to programming via LLMs allows more \n",
+       "people to engage in software development.\n",
+       "\n",
+       "### Future-Ready Digital Infrastructure\n",
+       "- **[33:39 - Building for agents: Future-ready digital infrastructure]**  \n",
+       "  He discusses the need to build a resilient and adaptable digital infrastructure that supports LLM-driven \n",
+       "applications and agents.\n",
+       "\n",
+       "### Conclusion\n",
+       "- **[38:14 - Summary: We\u2019re in the 1960s of LLMs \u2014 time to build]**  \n",
+       "  In conclusion, he reflects on the current state of LLMs, advocating for innovation and building a new kind of \n",
+       "computer that embraces the possibilities of software 3.0.\n",
+       "\n",
+       "## Main Takeaways\n",
+       "- The transition to Software 3.0 marks a significant shift where natural language processing changes how we \n",
+       "interact with technology.\n",
+       "- LLMs have the potential to democratize programming, making it accessible to a broader audience.\n",
+       "- Collaborative human-AI systems will define the future of software development, with a focus on adapting to new \n",
+       "paradigms.\n",
+       "- Historical computing analogies indicate that we are in the early stages of a transformative era, comparable to \n",
+       "the 1960s in computing.\n",
+       "\n",
+       "## Additional Links\n",
+       "- [Software 2.0 Blog Post](https://karpathy.medium.com/software-2-0-a64152b37c35)\n",
+       "- [LLMs Flip the Script on Technology Diffusion](https://karpathy.bearblog.dev/power-to-the-people/)\n",
+       "- [Vibe Coding MenuGen Retrospective](https://karpathy.bearblog.dev/vibe-coding-menugen/)\n",
+       "```\n",
+       "
\n" + ], + "text/plain": [ + "```\n", + "# YouTube Video Analysis: Andrej Karpathy - Software Is Changing (Again)\n", + "\n", + "## Video Details\n", + "- **Title:** Andrej Karpathy: Software Is Changing (Again)\n", + "- **Channel:** Y Combinator\n", + "- **Published On:** June 19, 2025\n", + "- **Duration:** 39:32\n", + "- **View Count:** 1,035,413\n", + "- **Like Count:** 33,783\n", + "- **Comment Count:** 750\n", + "- **Thumbnail:** ![Thumbnail](https://i.ytimg.com/vi/LCEmiRjPEtQ/hqdefault.jpg)\n", + "\n", + "## Key Insights Summary\n", + "\n", + "### Introduction\n", + "- **[00:00 - Intro]** \n", + " Andrej Karpathy introduces the topic of the keynote, discussing the evolution of software and the significance of\n", + "new technological paradigms.\n", + "\n", + "### Software Evolution: From 1.0 to 3.0\n", + "- **[01:25 - Software evolution: From 1.0 to 3.0]** \n", + " Karpathy explains the major shifts in software development, highlighting that we are transitioning into \"Software\n", + "3.0,\" where natural language processing (NLP) takes center stage.\n", + "\n", + "### Rise of Software 3.0\n", + "- **[04:40 - Programming in English: Rise of Software 3.0]** \n", + " In this segment, he emphasizes how the use of language models (LLMs) allows programmers to interact with software\n", + "using English, democratizing programming.\n", + "\n", + "### LLMs as Utilities\n", + "- **[06:10 - LLMs as utilities, fabs, and operating systems]** \n", + " Karpathy compares LLMs to utilities and operating systems, proposing that they will be foundational to future \n", + "software infrastructure.\n", + "\n", + "### New LLM OS\n", + "- **[11:04 - The new LLM OS and historical computing analogies]** \n", + " He discusses how current developments resemble the 1960s in computing history, pointing out the potential for new\n", + "forms of computing to emerge.\n", + "\n", + "### Psychology of LLMs\n", + "- **[14:39 - Psychology of LLMs: People spirits and cognitive quirks]** \n", + " The discussion shifts to the psychological aspects of LLMs, referring to them as \"people spirits\" that simulate \n", + "human-like capabilities while retaining unique flaws.\n", + "\n", + "### Designing LLM Apps\n", + "- **[18:22 - Designing LLM apps with partial autonomy]** \n", + " Karpathy suggests that LLM technology opens avenues for developing applications with varying degrees of autonomy.\n", + "\n", + "### Human-AI Collaboration\n", + "- **[23:40 - The importance of human-AI collaboration loops]** \n", + " Emphasizing the need for effective collaboration between humans and AI applications, he outlines strategies for \n", + "fostering productive interactions.\n", + "\n", + "### Lessons from Tesla Autopilot\n", + "- **[26:00 - Lessons from Tesla Autopilot & autonomy sliders]** \n", + " He shares lessons learned from Tesla\u2019s Autopilot system, detailing the implications for software development and \n", + "user experience.\n", + "\n", + "### The Iron Man Analogy\n", + "- **[27:52 - The Iron Man analogy: Augmentation vs. agents]** \n", + " The comparison of LLMs to Iron Man's suit illustrates the concept of augmentation, emphasizing the potential for \n", + "enhanced human capabilities.\n", + "\n", + "### Vibe Coding\n", + "- **[29:06 - Vibe Coding: Everyone is now a programmer]** \n", + " Karpathy explains 'vibe coding,' a concept where increased accessibility to programming via LLMs allows more \n", + "people to engage in software development.\n", + "\n", + "### Future-Ready Digital Infrastructure\n", + "- **[33:39 - Building for agents: Future-ready digital infrastructure]** \n", + " He discusses the need to build a resilient and adaptable digital infrastructure that supports LLM-driven \n", + "applications and agents.\n", + "\n", + "### Conclusion\n", + "- **[38:14 - Summary: We\u2019re in the 1960s of LLMs \u2014 time to build]** \n", + " In conclusion, he reflects on the current state of LLMs, advocating for innovation and building a new kind of \n", + "computer that embraces the possibilities of software 3.0.\n", + "\n", + "## Main Takeaways\n", + "- The transition to Software 3.0 marks a significant shift where natural language processing changes how we \n", + "interact with technology.\n", + "- LLMs have the potential to democratize programming, making it accessible to a broader audience.\n", + "- Collaborative human-AI systems will define the future of software development, with a focus on adapting to new \n", + "paradigms.\n", + "- Historical computing analogies indicate that we are in the early stages of a transformative era, comparable to \n", + "the 1960s in computing.\n", + "\n", + "## Additional Links\n", + "- [Software 2.0 Blog Post](https://karpathy.medium.com/software-2-0-a64152b37c35)\n", + "- [LLMs Flip the Script on Technology Diffusion](https://karpathy.bearblog.dev/power-to-the-people/)\n", + "- [Vibe Coding MenuGen Retrospective](https://karpathy.bearblog.dev/vibe-coding-menugen/)\n", + "```\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n",
+       "
\n" + ], + "text/plain": [ + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
/Users/zihaolin/src/klavis/.venv/lib/python3.13/site-packages/rich/live.py:231: UserWarning: install \"ipywidgets\" \n",
+       "for Jupyter support\n",
+       "  warnings.warn('install \"ipywidgets\" for Jupyter support')\n",
+       "
\n" + ], + "text/plain": [ + "/Users/zihaolin/src/klavis/.venv/lib/python3.13/site-packages/rich/live.py:231: UserWarning: install \"ipywidgets\" \n", + "for Jupyter support\n", + " warnings.warn('install \"ipywidgets\" for Jupyter support')\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Configure MCP server parameters for CrewAI\n", + "server_params = {\n", + " \"url\": youtube_mcp_instance.server_url,\n", + " \"transport\": \"streamable-http\"\n", + "}\n", + "\n", + "YOUTUBE_VIDEO_URL = \"https://www.youtube.com/watch?v=LCEmiRjPEtQ\" # pick the video you want to analyze\n", + "\n", + "try:\n", + " with MCPServerAdapter(server_params) as mcp_tools:\n", + " print(f\"\u2705 Available tools from YouTube MCP server: {[tool.name for tool in mcp_tools]}\")\n", + " \n", + " # Create a YouTube Analysis Agent\n", + " youtube_agent = Agent(\n", + " role=\"YouTube Content Analyst\",\n", + " goal=\"Research and analyze YouTube videos to extract comprehensive insights and create structured summaries with timestamps\",\n", + " backstory=\"You are an expert at analyzing video content, extracting transcripts with precise timestamps, and creating professional summaries with key insights, takeaways, and time-coded references for easy navigation.\",\n", + " tools=mcp_tools,\n", + " reasoning=True,\n", + " verbose=False\n", + " )\n", + " \n", + " # Define Task\n", + " analysis_task = Task(\n", + " description=f\"Research the YouTube video at {YOUTUBE_VIDEO_URL}. Extract the video transcript, analyze the content, and create a comprehensive summary with key points, timestamps, and main takeaways. Structure it in a professional format.\",\n", + " expected_output=\"Complete video analysis with transcript, structured summary, key insights, timestamps, and main takeaways\",\n", + " agent=youtube_agent,\n", + " markdown=True\n", + " )\n", + " \n", + " # Create and execute the crew\n", + " youtube_crew = Crew(\n", + " agents=[youtube_agent],\n", + " tasks=[analysis_task],\n", + " verbose=False, # set to True to print the reasoning process\n", + " process=Process.sequential\n", + " )\n", + " \n", + " result = youtube_crew.kickoff()\n", + " print(result)\n", + " \n", + "except Exception as e:\n", + " print(f\"\u274c Error connecting to YouTube MCP server: {e}\")\n", + " print(\"Ensure the MCP server is running and accessible.\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Case Study 2: Multi-Service Research Crew with Gmail Integration\n", + "\n", + "Let's create a more complex crew that uses multiple MCP servers - YouTube for research and Gmail for communication.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 1 - Create Gmail MCP Server (OAuth required)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\ud83d\udd10 Opening OAuth authorization for Gmail...\n",
+       "
\n" + ], + "text/plain": [ + "\ud83d\udd10 Opening OAuth authorization for Gmail...\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import webbrowser\n", + "\n", + "gmail_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.GMAIL,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "webbrowser.open(gmail_mcp_instance.oauth_url)\n", + "\n", + "print(f\"\ud83d\udd10 Opening OAuth authorization for Gmail...\")\n", + "print(f\"\ud83d\udcf1 If not redirected automatically, open: {gmail_mcp_instance.oauth_url}\")\n", + "# print(f\"\ud83d\udd17 Gmail MCP server will be available at: {gmail_mcp_instance.server_url}\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 2 - Create Multi-Service Crew with Multiple MCP Servers\n" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "VIDEO_URL = \"https://www.youtube.com/watch?v=LCEmiRjPEtQ\" # pick the video you want to analyze\n", + "RECIPIENT_EMAIL = \"zihaolin@klavis.ai\" # Replace with your email\n", + "\n", + "\n", + "# Configure multiple MCP servers\n", + "multiple_server_params = [\n", + " {\n", + " \"url\": youtube_mcp_instance.server_url,\n", + " \"transport\": \"streamable-http\"\n", + " },\n", + " {\n", + " \"url\": gmail_mcp_instance.server_url,\n", + " \"transport\": \"streamable-http\"\n", + " }\n", + "]\n", + "\n", + "try:\n", + " with MCPServerAdapter(multiple_server_params) as all_mcp_tools:\n", + " print(f\"\u2705 Available tools from all MCP servers: {[tool.name for tool in all_mcp_tools]}\")\n", + " \n", + " # Create YouTube Research Agent\n", + " youtube_research_agent = Agent(\n", + " role=\"YouTube Content Analyst\",\n", + " goal=\"Research and analyze YouTube videos to extract comprehensive insights and create structured summaries with timestamps\",\n", + " backstory=\"You are an expert at analyzing video content, extracting transcripts with precise timestamps, and creating professional summaries with key insights, takeaways, and time-coded references for easy navigation.\",\n", + " tools=mcp_tools,\n", + " reasoning=False,\n", + " verbose=False,\n", + " )\n", + " \n", + " # Create Email Communication Agent\n", + " email_agent = Agent(\n", + " role=\"Email Communications Specialist\",\n", + " goal=\"Draft and send professional email communications based on research findings\",\n", + " backstory=\"You are skilled at crafting professional emails and managing correspondence with clear, impactful messaging.\",\n", + " tools=mcp_tools,\n", + " reasoning=True,\n", + " verbose=False,\n", + " )\n", + " \n", + " # Define workflow tasks\n", + " youtube_research_task = Task(\n", + " description=f\"Research the YouTube video at {VIDEO_URL}. Extract transcript, analyze the content for key insights about AI and software development, and create a comprehensive analysis report with key takeaways and recommendations.\",\n", + " expected_output=\"Complete video analysis report with transcript, key insights, recommendations, and strategic takeaways\",\n", + " agent=youtube_research_agent\n", + " )\n", + " \n", + " send_email_task = Task(\n", + " description=f\"Based on the youtube analyze, draft and send a professional email to {RECIPIENT_EMAIL} with the subject 'Youtube video AI analysis'. Include content of the youtube video in the email.\",\n", + " expected_output=\"Confirmation that a professional email has been sent with the research insights\",\n", + " agent=email_agent,\n", + " context=[youtube_research_task]\n", + " )\n", + " \n", + " # Create and execute the crew\n", + " multi_service_crew = Crew(\n", + " agents=[youtube_research_agent, email_agent],\n", + " tasks=[youtube_research_task, send_email_task],\n", + " verbose=False, # set to True to print the reasoning process\n", + " process=Process.sequential\n", + " )\n", + " \n", + " result = multi_service_crew.kickoff()\n", + " print(result)\n", + " \n", + "except Exception as e:\n", + " print(f\"\u274c Error with multi-service MCP integration: {e}\")\n", + " print(\"Ensure all MCP servers are running and properly authenticated.\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Security Best Practices\n", + "\n", + "When using CrewAI with Klavis MCP servers, follow these security guidelines:\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Example of secure MCP server configuration\n", + "def create_secure_mcp_crew():\n", + " \"\"\"\n", + " Demonstrates secure MCP server integration with CrewAI\n", + " \"\"\"\n", + " # Use environment variables for sensitive data\n", + " api_key = os.getenv(\"KLAVIS_API_KEY\")\n", + " if not api_key:\n", + " raise ValueError(\"KLAVIS_API_KEY environment variable is required\")\n", + " \n", + " # Validate server URLs (use HTTPS in production)\n", + " server_params = {\n", + " \"url\": youtube_mcp_instance.server_url,\n", + " \"transport\": \"streamable-http\"\n", + " }\n", + " \n", + " # Always use context managers for proper resource cleanup\n", + " try:\n", + " with MCPServerAdapter(server_params) as mcp_tools:\n", + " # Validate available tools before use\n", + " if not mcp_tools:\n", + " raise ValueError(\"No tools available from MCP server\")\n", + " \n", + " print(f\"\u2705 Securely connected with {len(mcp_tools)} tools\")\n", + " return mcp_tools\n", + " \n", + " except Exception as e:\n", + " print(f\"\ud83d\udd12 Security check failed: {e}\")\n", + " return None\n", + "\n", + "# Example usage\n", + "secure_tools = create_secure_mcp_crew()\n", + "if secure_tools:\n", + " print(\"\u2705 Secure connection established successfully\")\n", + "else:\n", + " print(\"\u274c Secure connection failed\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Summary\n", + "\n", + "This tutorial demonstrated how to integrate CrewAI with Klavis MCP servers. We covered several key patterns:\n", + "\n", + "### \ud83c\udfaf **Key Features Demonstrated:**\n", + "\n", + "1. **Single Agent YouTube Analysis**: Using 1 agent with YouTube MCP server for comprehensive video analysis\n", + "2. **Two-Agent Multi-Service Workflow**: YouTube research agent + Email communication agent using multiple MCP servers\n", + "3. **Security Best Practices**: Secure configuration and error handling\n", + "\n", + "### \ud83d\ude80 **CrewAI + Klavis Benefits:**\n", + "\n", + "- **Seamless Integration**: MCPServerAdapter makes it easy to connect to Klavis MCP servers\n", + "- **Agent Specialization**: Different agents can have access to different tools\n", + "- **Efficient Workflows**: Streamlined crews with focused responsibilities\n", + "- **Scalable Architecture**: Easy to add more MCP servers and tools\n", + "- **Professional AI Crews**: Create sophisticated AI teams with real-world capabilities\n", + "\n", + "### \ud83d\udee0 **Available MCP Servers:**\n", + "Klavis provides MCP servers for YouTube, Gmail, Google Drive, Slack, GitHub, and many more services - all easily integrable with CrewAI!\n", + "\n", + "**Happy crew building!** \ud83d\ude80\ud83d\udc65\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/examples/crewai/python/README.md b/examples/crewai/python/README.md new file mode 100644 index 00000000..088c0249 --- /dev/null +++ b/examples/crewai/python/README.md @@ -0,0 +1,68 @@ +# CrewAI + Klavis Multi-Service Research Crew + +This example demonstrates how to use CrewAI with Klavis MCP servers to create a multi-service AI crew that can research YouTube videos and send email summaries. + +## Features + +- **YouTube Content Analysis**: Analyzes YouTube videos and extracts transcripts with timestamps +- **Email Communication**: Sends professional email summaries based on research findings +- **Multi-Agent Workflow**: Coordinates between specialized agents for different tasks + +## Prerequisites + +- Python 3.8+ +- OpenAI API key +- Klavis API key + +## Installation + +1. Install dependencies: +```bash +pip install -r requirements.txt +``` + +2. Set environment variables: +```bash +export OPENAI_API_KEY="your_openai_api_key" +export KLAVIS_API_KEY="your_klavis_api_key" +``` + +## Usage + +1. Edit the configuration in `multi_service_crew.py`: + - Change `VIDEO_URL` to your desired YouTube video + - Update `RECIPIENT_EMAIL` to your email address + +2. Run the script: +```bash +python multi_service_crew.py +``` + +3. Follow the OAuth authorization flow for Gmail when prompted + +## How it Works + +1. **YouTube MCP Server**: Creates a server instance for YouTube video analysis +2. **Gmail MCP Server**: Creates a server instance for sending emails (requires OAuth) +3. **Research Agent**: Analyzes the YouTube video and extracts key insights +4. **Email Agent**: Drafts and sends a professional email with the research findings +5. **Sequential Workflow**: The email task waits for the research task to complete + +## Configuration + +The script uses two main configuration variables: +- `VIDEO_URL`: The YouTube video to analyze +- `RECIPIENT_EMAIL`: The email address to send results to + +## Security + +- Never commit API keys to version control +- Use environment variables for sensitive configuration +- The Gmail integration requires OAuth authentication for security + +## Troubleshooting + +- Ensure both API keys are set as environment variables +- Complete the Gmail OAuth flow before proceeding +- Check that MCP servers are accessible and running +- Review verbose output by setting `verbose=True` in the Crew configuration \ No newline at end of file diff --git a/examples/crewai/python/multi_service_crew.py b/examples/crewai/python/multi_service_crew.py new file mode 100644 index 00000000..53835eb3 --- /dev/null +++ b/examples/crewai/python/multi_service_crew.py @@ -0,0 +1,103 @@ +""" +Multi-Service Crew with YouTube and Gmail Integration +Demonstrates CrewAI + Klavis MCP servers for YouTube research and Gmail communication +""" + +import os +import webbrowser +from crewai import Agent, Task, Crew, Process +from crewai_tools import MCPServerAdapter +from klavis import Klavis +from klavis.types import McpServerName + +VIDEO_URL = "https://www.youtube.com/watch?v=LCEmiRjPEtQ" # Change to your desired video +RECIPIENT_EMAIL = "zihaolin@klavis.ai" # Replace with your email + +def main(): + """Main function to execute the multi-service research crew""" + print(f"Klavis API key: {os.getenv('KLAVIS_API_KEY')}") + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + try: + # Step 1: Create YouTube MCP Server + youtube_mcp_instance = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.YOUTUBE, + user_id="1234") + print(f"āœ… YouTube MCP server created: {youtube_mcp_instance.server_url}") + + # Step 2: Create Gmail MCP Server (OAuth required) + gmail_mcp_instance = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.GMAIL, + user_id="1234") + webbrowser.open(gmail_mcp_instance.oauth_url) + + print(f"šŸ“± If you are not redirected to the Gmail OAuth page, please open it manually: {gmail_mcp_instance.oauth_url}") + + # Step 3: Configure multiple MCP servers + multiple_server_params = [ + { + "url": youtube_mcp_instance.server_url, + "transport": "streamable-http" + }, + { + "url": gmail_mcp_instance.server_url, + "transport": "streamable-http" + } + ] + + # Step 4: Create and execute the multi-service crew + with MCPServerAdapter(multiple_server_params) as all_mcp_tools: + print(f"āœ… Available tools: {[tool.name for tool in all_mcp_tools]}") + + # Create YouTube Analysis Agent + youtube_analysis_agent = Agent( + role="YouTube Content Analyst", + goal="Research and analyze YouTube videos to extract comprehensive insights and create structured summaries with timestamps", + backstory="You are an expert at analyzing video content, extracting transcripts with precise timestamps, and creating professional summaries with key insights, takeaways, and time-coded references for easy navigation.", + tools=all_mcp_tools, + reasoning=False, + verbose=False + ) + + # Create Email Agent + email_agent = Agent( + role="Email Communications Specialist", + goal="Draft and send professional email communications based on research findings", + backstory="You are skilled at crafting professional emails and managing correspondence with clear, impactful messaging.", + tools=all_mcp_tools, + reasoning=True, + verbose=False + ) + + # Define workflow tasks + youtube_analysis_task = Task( + description=f"Research the YouTube video at {VIDEO_URL}. Extract transcript, analyze the content for key insights about AI and software development, and create a comprehensive analysis report with key takeaways and recommendations.", + expected_output="Complete video analysis report with transcript, key insights, recommendations, and strategic takeaways", + agent=youtube_analysis_agent + ) + + send_email_task = Task( + description=f"Based on the youtube analysis, draft and send a professional email to {RECIPIENT_EMAIL} with the subject 'YouTube Video AI Analysis'. Include content of the youtube video in the email.", + expected_output="Confirmation that a professional email has been sent with the research insights", + agent=email_agent, + context=[youtube_analysis_task] + ) + + # Create and execute the crew + multi_service_crew = Crew( + agents=[youtube_analysis_agent, email_agent], + tasks=[youtube_analysis_task, send_email_task], + verbose=True, # Set to False to reduce output + process=Process.sequential + ) + + result = multi_service_crew.kickoff() + + print(result) + + except Exception as e: + print(f"Error with multi-service MCP integration: {e}") + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/crewai/python/requirements.txt b/examples/crewai/python/requirements.txt new file mode 100644 index 00000000..f9787645 --- /dev/null +++ b/examples/crewai/python/requirements.txt @@ -0,0 +1,4 @@ +crewai>=0.130.0 +crewai-tools[mcp]>=0.48.0 +klavis>=1.0.1 +openai>=1.91.0 \ No newline at end of file diff --git a/examples/crewai/python/salesforce-gmail.py b/examples/crewai/python/salesforce-gmail.py new file mode 100644 index 00000000..3e611024 --- /dev/null +++ b/examples/crewai/python/salesforce-gmail.py @@ -0,0 +1,100 @@ +""" +Multi-Service Crew with Salesforce and Gmail Integration +Demonstrates CrewAI + Klavis MCP servers for Salesforce opportunity management and Gmail communication +""" + +import os +import webbrowser +from crewai import Agent, Task, Crew, Process +from crewai_tools import MCPServerAdapter +from klavis import Klavis +from klavis.types import McpServerName + + +def multi_server_crew(): + """Main function to execute the multi-service Salesforce crew""" + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + try: + # Step 1: Create Salesforce MCP Server + salesforce_mcp_instance = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.SALESFORCE, + user_id="1234") + webbrowser.open(salesforce_mcp_instance.oauth_url) + + input("Press Enter after OAuth...") + + # Step 2: Create Gmail MCP Server + gmail_mcp_instance = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.GMAIL, + user_id="1234") + webbrowser.open(gmail_mcp_instance.oauth_url) + + input("Press Enter after OAuth...") + + # Step 3: Configure MCP servers via CrewAI + salesforce_mcp_server = MCPServerAdapter({ + "url": salesforce_mcp_instance.server_url, + "transport": "streamable-http" + }) + + gmail_mcp_server = MCPServerAdapter({ + "url": gmail_mcp_instance.server_url, + "transport": "streamable-http" + }) + + try: + # Salesforce Agent + salesforce_agent = Agent( + role="Salesforce Opportunity Analyst", + goal="Find opportunities with pending next steps", + backstory="Expert at finding Salesforce opportunities that need follow-up actions", + tools=salesforce_mcp_server.tools, + verbose=False + ) + + # Email Agent + email_agent = Agent( + role="Email Specialist", + goal="Draft follow-up emails based on Salesforce Opportunity next steps", + backstory="Expert at drafting complete, professional follow-up emails without placeholder content. Always writes proper signatures and complete email content.", + tools=gmail_mcp_server.tools, + verbose=False + ) + + # Tasks + salesforce_task = Task( + description="Find OpenAI opportunities with next steps. Extract opportunity name, next step details, and contact emails.", + expected_output="List of opportunities with next steps and contact information", + agent=salesforce_agent + ) + + email_task = Task( + description="Draft professional follow-up emails based on the Salesforce next steps. Include opportunity context and clear next actions. ", + expected_output="Draft email with complete content ready for review", + agent=email_agent + ) + + # Execute crew + crew = Crew( + agents=[salesforce_agent, email_agent], + tasks=[salesforce_task, email_task], + process=Process.sequential, + verbose=True + ) + + result = crew.kickoff() + print(result) + + except Exception as e: + print(f"Error: {e}") + finally: + salesforce_mcp_server.stop() + gmail_mcp_server.stop() + + except Exception as e: + print(f"Setup Error: {e}") + + +if __name__ == "__main__": + multi_server_crew() \ No newline at end of file diff --git a/examples/fireworks-ai/Use_Klavis_with_Fireworks.ipynb b/examples/fireworks-ai/Use_Klavis_with_Fireworks.ipynb new file mode 100644 index 00000000..11460e7a --- /dev/null +++ b/examples/fireworks-ai/Use_Klavis_with_Fireworks.ipynb @@ -0,0 +1,381 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/fireworks-ai/cookbook/blob/main/integrations/Klavis/Use_Klavis_with_Fireworks.ipynb)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "# Fireworks AI + Klavis AI Integration\n", + "\n", + "# \n", + "\n", + "\n", + "In this tutorial, we'll explore how to build an AI agent that integrates Fireworks AI's LLM with Klavis MCP Servers:\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Prerequisites\n", + "\n", + "Before we begin, you'll need:\n", + "\n", + "- **Fireworks AI API key** - see here [fireworks.ai](https://fireworks.ai/)\n", + "- **Klavis API key** - see here [klavis.ai](https://klavis.ai/)\n", + "\n", + "Make sure to keep these API keys secure and never commit them to version control!\n" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m25.0\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m25.1.1\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# Install the required packages\n", + "%pip install -qU fireworks-ai klavis" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "from fireworks.client import Fireworks\n", + "from klavis import Klavis\n", + "from klavis.types import McpServerName, ToolFormat\n", + "\n", + "# Set environment variables\n", + "os.environ[\"FIREWORKS_API_KEY\"] = \"fw_XXXXXXXXXXXXXX\" # Replace with your actual Fireworks API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"XXXXXXXXXXXXX\" # Replace with your actual Klavis API key\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Create AI Agent with MCP Integration\n", + "\n", + "Now we'll create an intelligent agent that can use MCP servers through Klavis API. This agent will:\n", + "\n", + "1. **Discover Tools**: Automatically find available tools from MCP servers\n", + "2. **Function Calling**: Use Fireworks AI's function calling capabilities\n", + "3. **Tool Execution**: Execute tools through Klavis API\n", + "4. **Smart Responses**: Generate intelligent responses based on tool results\n" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [], + "source": [ + "class Agent:\n", + " def __init__(self, fireworks_client, klavis_client, mcp_server_url):\n", + " self.fireworks = fireworks_client\n", + " self.klavis = klavis_client\n", + " self.mcp_server_url = mcp_server_url\n", + " self.model = \"accounts/fireworks/models/qwen2p5-72b-instruct\"\n", + " print(f\"\ud83e\udd16 Agent initialized with model: {self.model}\")\n", + " \n", + " def process_request(self, user_message):\n", + " # 1. Get available tools\n", + " mcp_tools = self.klavis.mcp_server.list_tools(\n", + " server_url=self.mcp_server_url,\n", + " format=ToolFormat.OPENAI,\n", + " )\n", + " \n", + " # 2. Call LLM with tools\n", + " messages = [\n", + " {\"role\": \"system\", \"content\": \"You are a helpful assistant.\"},\n", + " {\"role\": \"user\", \"content\": user_message}\n", + " ]\n", + " \n", + " response = self.fireworks.chat.completions.create(\n", + " model=self.model,\n", + " messages=messages,\n", + " tools=mcp_tools.tools\n", + " )\n", + " \n", + " assistant_message = response.choices[0].message\n", + " messages.append(assistant_message)\n", + " \n", + " # 3. If LLM wants to use tools\n", + " if assistant_message.tool_calls:\n", + " \n", + " # Execute each tool call\n", + " for tool_call in assistant_message.tool_calls:\n", + " tool_name = tool_call.function.name\n", + " tool_args = json.loads(tool_call.function.arguments)\n", + " \n", + " print(f\"\ud83d\udee0\ufe0f Calling tool: {tool_name} with args: {tool_args}\")\n", + " # Call tool via Klavis SDK\n", + " tool_result = self.klavis.mcp_server.call_tools(\n", + " server_url=self.mcp_server_url,\n", + " tool_name=tool_name,\n", + " tool_args=tool_args,\n", + " )\n", + " \n", + " messages.append({\n", + " \"role\": \"tool\",\n", + " \"tool_call_id\": tool_call.id,\n", + " \"content\": str(tool_result)\n", + " })\n", + " \n", + " # 4. Get final response from LLM\n", + " final_response = self.fireworks.chat.completions.create(\n", + " model=self.model,\n", + " messages=messages\n", + " )\n", + " return final_response.choices[0].message.content\n", + " \n", + " # If no tools needed, return the assistant message directly\n", + " return assistant_message.content\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Use Case 1: Summarize YouTube Video" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u2705 Created YouTube MCP instance\n", + "\ud83e\udd16 Agent initialized with model: accounts/fireworks/models/qwen2p5-72b-instruct\n", + "\ud83d\udee0\ufe0f Calling tool: get_youtube_video_transcript with args: {'url': 'https://www.youtube.com/watch?v=kPXvf2-C_Hs'}\n", + "### Summary of the Video: \"How This AI Startup Grew by 100x in Just 6 Months | Fireworks AI, Lin Qiao\"\n", + "\n", + "**Title:** How This AI Startup Grew by 100x in Just 6 Months | Fireworks AI, Lin Qiao \n", + "**Published:** August 20, 2024 \n", + "**Channel:** EO \n", + "**Duration:** 11 minutes 55 seconds \n", + "**Views:** 77,278 \n", + "**Likes:** 1,650 \n", + "**Comments:** 51\n", + "\n", + "#### **0:00 - 1:00: Introduction**\n", + "- Introduction to Fireworks AI, one of the fastest-growing AI startups in the industry.\n", + "- Over the past six months, Fireworks AI has seen a 100x increase in traffic.\n", + "- In July, the company closed a Series B funding round led by Sequoia, bringing its valuation to $552 million.\n", + "\n", + "#### **1:00 - 3:30: Lin Qiao's Background**\n", + "- Lin Qiao, the founder of Fireworks AI, previously served as the Head of PyTorch.\n", + "- She shares how her experiences at PyTorch inspired the creation of Fireworks AI.\n", + "- Lin highlights the importance of staying connected with the latest AI developments and the community.\n", + "\n", + "#### **3:30 - 5:00: The AI Transformation**\n", + "- Discussion on the ongoing AI transformation and its impact on various industries.\n", + "- Lin emphasizes the need for adaptability and continuous learning in the AI landscape.\n", + "- She talks about the rapid pace of innovation and the importance of staying ahead of the curve.\n", + "\n", + "#### **5:00 - 7:00: Fireworks AI's Growth Strategy**\n", + "- Explanation of the key strategies that drove Fireworks AI's exponential growth.\n", + "- Focus on user-centric design and scalable infrastructure.\n", + "- Importance of community engagement and feedback loops in product development.\n", + "\n", + "#### **7:00 - 9:00: Insights for Software Engineers**\n", + "- Lin provides insights into what it takes to excel as a software engineer in the AI industry.\n", + "- Emphasis on practical skills, theoretical knowledge, and the ability to work in interdisciplinary teams.\n", + "- Advice on building a strong foundation in AI and staying updated with the latest research.\n", + "\n", + "#### **9:00 - 11:00: The Future of Fireworks AI**\n", + "- Lin shares her vision for the future of Fireworks AI.\n", + "- Plans for expanding into new markets and developing cutting-edge AI applications.\n", + "- The importance of maintaining a startup culture even as the company scales.\n", + "\n", + "**Contact Information:**\n", + "- **Twitter:** @EO__Global\n", + "- **LinkedIn:** @EO STUDIO\n", + "- **Instagram:** @eostudio.official\n", + "\n", + "**Subtitles:**\n", + "- Created using [XL8.ai](http://xl8.ai/) machine translation.\n", + "\n", + "This video provides valuable insights into the success of Fireworks AI and the broader AI industry, making it a must-watch for entrepreneurs, software engineers, and anyone interested in AI's future.\n" + ] + } + ], + "source": [ + "YOUTUBE_VIDEO_URL = \"https://www.youtube.com/watch?v=kPXvf2-C_Hs\" # pick a video you like!\n", + "\n", + "# 1. Initialize Fireworks AI client and Klavis client\n", + "fireworks_client = Fireworks(api_key=os.getenv(\"FIREWORKS_API_KEY\"))\n", + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))\n", + "\n", + "# 2. Create YouTube MCP server instance\n", + "youtube_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.YOUTUBE,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "# 3. Create an agent with YouTube MCP server\n", + "agent = Agent(fireworks_client, klavis_client, youtube_mcp_instance.server_url)\n", + "\n", + "response = agent.process_request(\n", + " f\"Summarize this YouTube video with timestamps: {YOUTUBE_VIDEO_URL}\"\n", + ")\n", + "\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Use Case 2: Send Email\n", + "\n", + "**Note**: Gmail integration requires OAuth authentication, so you'll need to authorize the application in your browser. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import webbrowser\n", + "\n", + "# Create Gmail MCP server instance\n", + "gmail_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.GMAIL,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "# Redirect to Gmail OAuth page\n", + "webbrowser.open(gmail_mcp_instance.oauth_url)\n", + "print(f\"\ud83d\udd10 Opening OAuth authorization for Gmail, if you are not redirected, please open the following URL in your browser: {gmail_mcp_instance.oauth_url}\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The email has been sent successfully to zihaolin@klavis.ai with the subject \"Hello, World!\" and the body \"This is a test email.\" The email ID is 1977620de59daa96.\n" + ] + } + ], + "source": [ + "EMAIL_SUBJECT = \"Hello, World!\"\n", + "EMAIL_BODY = \"This is a test email.\"\n", + "EMAIL_RECIPIENT = \"zihaolin@klavis.ai\" # replace with your email\n", + "\n", + "# Create an agent with Gmail MCP server\n", + "agent = Agent(fireworks_client, klavis_client, gmail_mcp_instance.server_url)\n", + "\n", + "response = agent.process_request(\n", + " f\"Send an email to {EMAIL_RECIPIENT} with subject {EMAIL_SUBJECT} and body {EMAIL_BODY}\"\n", + ")\n", + "\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Summary\n", + "\n", + "This tutorial demonstrated how to integrate Fireworks AI's function calling capabilities with Klavis MCP servers to create powerful AI applications. We covered two practical examples:\n", + "\n", + "**\ud83c\udfa5 YouTube Integration**: Built an AI assistant that can automatically summarize YouTube videos by extracting transcripts and providing detailed, timestamped summaries.\n", + "\n", + "**\ud83d\udce7 Gmail Integration**: Created an AI-powered email assistant that can send emails through Gmail with OAuth authentication.\n", + "\n", + "### Key Takeaways:\n", + "- **Easy Setup**: Klavis MCP servers can be created with just a few lines of code using the official SDK\n", + "- **Fireworks AI Compatible**: All tools are formatted for seamless Fireworks AI function calling\n", + "- **Versatile**: Support for both simple APIs (YouTube) and OAuth-authenticated services (Gmail)\n", + "- **Scalable**: The same pattern can be applied to any of the 100+ MCP servers available in Klavis\n", + "\n", + "### \ud83d\ude80 Next Steps\n", + "- **Explore More MCP Servers**: Try other available servers like Slack, Notion, CRM, etc.\n", + "- **Try Different Fireworks AI Models**: Experiment with various models like Llama, Mixtral, or Deepseek for different use cases\n", + "- **Build Complex Multi-Server Workflows**: Create sophisticated agents that combine Gmail + Slack + Notion for complete business automation\n", + "- **Production Deployment**: Scale these patterns for production applications\n", + "\n", + "### \ud83d\udd17 Useful Resources\n", + "- [Fireworks AI Documentation](https://docs.fireworks.ai/)\n", + "- [Klavis AI Documentation](https://www.klavis.ai/docs/)\n", + "- [MCP Protocol Specification](https://modelcontextprotocol.io/)\n", + "\n", + "**Happy building!** \ud83d\ude80\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/examples/google-gemini-cli/README.md b/examples/google-gemini-cli/README.md new file mode 100644 index 00000000..6edfb183 --- /dev/null +++ b/examples/google-gemini-cli/README.md @@ -0,0 +1,127 @@ +# klavis + +A CLI tool for easily managing Klavis AI MCP servers in Google Gemini CLI. + +## Installation + +```bash +npm install -g klavis +``` + +## Usage + +### Help + +```bash +klavis gemini --help +``` + +Shows detailed help information with all commands and examples. + +## Commands + +### Add MCP Server + +```bash +klavis gemini add +``` + +**Parameters:** +- ``: URL of your MCP server instance. + +**Note:** Only Klavis AI MCPs can be added with this tool. + +### Remove MCP Server + +```bash +klavis gemini remove +``` + +**Parameters:** +- ``: Name of the MCP to remove (e.g., `gmail`, `slack`, `notion`) + +**Note:** Only Klavis AI MCPs can be removed with this tool. + +### List MCP Servers + +```bash +klavis gemini list +``` + +Shows all currently configured Klavis AI MCP servers. + +### Clear All MCP Servers + +```bash +klavis gemini clear --force +``` + +Removes all Klavis AI MCP servers from the configuration. Requires `--force` flag for safety. + +### Examples + +**Add Gmail MCP Server:** +```bash +klavis gemini add https://gmail-mcp-server.klavis.ai/mcp/?instance_id=your-id +``` + +**Add Slack MCP Server:** +```bash +klavis gemini add https://slack-mcp-server.klavis.ai/mcp/?instance_id=your-id +``` + +**Add Notion MCP Server:** +```bash +klavis gemini add https://notion-mcp-server.klavis.ai/mcp/?instance_id=your-id +``` + +**Remove Gmail MCP Server:** +```bash +klavis gemini remove gmail +``` + +**Remove Slack MCP Server:** +```bash +klavis gemini remove slack +``` + +**List all MCP Servers:** +```bash +klavis gemini list +``` + +**Clear all MCP Servers:** +```bash +klavis gemini clear --force +``` + +## What It Does + +1. **Locates** your Gemini config settings (`~/.gemini/settings.json`) +2. **Creates backup** of existing settings +3. **Adds, removes, lists, or clears** Klavis AI MCP server configurations in your Gemini settings +4. **Preserves** all existing preferences and authentication + +## Configuration + +After adding an MCP server, it will be added to `~/.gemini/settings.json`: + +```json +{ + "mcpServers": { + "gmail": { + "command": "npx", + "args": ["mcp-remote", "https://gmail-mcp-server.klavis.ai/mcp/?instance_id=your-id"] + } + } +} +``` + +## Requirements + +- Node.js 14.0.0 or higher +- Google Gemini installed with `.gemini` configuration directory + +## License + +MIT \ No newline at end of file diff --git a/examples/google-gemini-cli/index.js b/examples/google-gemini-cli/index.js new file mode 100644 index 00000000..e1ecb277 --- /dev/null +++ b/examples/google-gemini-cli/index.js @@ -0,0 +1,282 @@ +#!/usr/bin/env node +const fs = require("fs"); +const path = require("path"); +const os = require("os"); + +function parseArgs(argv) { + const args = { _: [], flags: {} }; + for (let i = 0; i < argv.length; i++) { + const a = argv[i]; + if (a.startsWith("--")) { + const key = a.replace(/^--/, ""); + const value = argv[i + 1] && !argv[i + 1].startsWith("-") ? argv[++i] : true; + args.flags[key] = value; + } else { + args._.push(a); + } + } + return args; +} + +// Get settings path for Gemini +function getSettingsPath() { + const geminiDir = path.join(os.homedir(), ".gemini"); + if (!fs.existsSync(geminiDir)) fs.mkdirSync(geminiDir); + return path.join(geminiDir, "settings.json"); +} + +// Check if an MCP server is from Klavis AI +function isKlavisAiService(serverConfig) { + if (!serverConfig || !serverConfig.args || !serverConfig.args[1]) { + return false; + } + const url = serverConfig.args[1]; + return url.includes('klavis.ai'); +} + +// Create backup and cleanup old backups +function createBackup(settingsPath) { + if (!fs.existsSync(settingsPath)) { + return; + } + + const backupPath = `${settingsPath}.bak.${Date.now()}`; + fs.copyFileSync(settingsPath, backupPath); + + // Clean up old backups - keep only 1 most recent + try { + const geminiDir = path.dirname(settingsPath); + const backupFiles = fs.readdirSync(geminiDir) + .filter(file => file.startsWith('settings.json.bak.')) + .map(file => ({ + name: file, + path: path.join(geminiDir, file), + timestamp: parseInt(file.split('.bak.')[1]) || 0 + })) + .sort((a, b) => b.timestamp - a.timestamp); // Sort by timestamp, newest first + + // Remove backups beyond the 1 most recent + if (backupFiles.length > 1) { + const toDelete = backupFiles.slice(1); + toDelete.forEach(backup => { + fs.unlinkSync(backup.path); + }); + } + } catch (e) { + // Silently ignore cleanup errors + } +} + +// Show help information +function showHelp() { + console.log("šŸ“š Klavis AI - MCP Server Manager for Gemini"); + console.log("==========================================="); + console.log(""); + console.log("DESCRIPTION:"); + console.log(" A CLI tool for managing Klavis AI MCP servers in Gemini CLI"); + console.log(""); + console.log("USAGE:"); + console.log(" klavis [options]"); + console.log(""); + console.log("COMMANDS:"); + console.log(" gemini --help Show this help message"); + console.log(" gemini add Add a Klavis AI MCP server to Gemini"); + console.log(" gemini remove Remove a Klavis AI MCP server from Gemini"); + console.log(" gemini list List all configured Klavis AI MCP servers"); + console.log(" gemini clear --force Remove all Klavis AI MCP servers from Gemini"); + console.log(""); + console.log(""); + console.log("EXAMPLES:"); + console.log(" # Show help"); + console.log(" klavis gemini --help"); + console.log(""); + console.log(" # Add an MCP server"); + console.log(" klavis gemini add https://myservice-mcp-server.klavis.ai/mcp/?instance_id=your-id"); + console.log(""); + console.log(" # List all configured servers"); + console.log(" klavis gemini list"); + console.log(""); + console.log(" # Remove a specific server"); + console.log(" klavis gemini remove gmail"); + console.log(""); + console.log(" # Clear all Klavis AI servers"); + console.log(" klavis gemini clear --force"); + console.log(""); + console.log("NOTES:"); + console.log(" • Only Klavis AI MCPs can be managed with this tool"); + console.log(" • Settings are stored in ~/.gemini/settings.json"); + console.log(" • Automatic backups are created before modifications"); + console.log(""); +} + +function main() { + const args = parseArgs(process.argv.slice(2)); + + // Handle help flag + if (args._[0] === "gemini" && args.flags.help) { + showHelp(); + return; + } + + if (args._[0] !== "gemini" || !["add", "remove", "list", "clear", "help"].includes(args._[1])) { + console.error("Usage: klavis gemini add | klavis gemini remove | klavis gemini list | klavis gemini clear --force | klavis gemini --help"); + console.error("For detailed help, run: klavis gemini --help"); + process.exit(1); + } + + // For add and remove commands, require the third argument + if ((args._[1] === "add" || args._[1] === "remove") && !args._[2]) { + console.error("Usage: klavis gemini add | klavis gemini remove | klavis gemini list | klavis gemini clear --force | klavis gemini --help"); + process.exit(1); + } + + // For clear command, require the --force flag + if (args._[1] === "clear" && !args.flags.force) { + console.error("āŒ Clear command requires --force flag for safety"); + console.error("Usage: klavis gemini clear --force"); + process.exit(1); + } + + const action = args._[1]; + const input = args._[2]; + let service, instanceUrl; + + if (action === "add") { + // For add command, expect URL + instanceUrl = input; + if (!instanceUrl.startsWith('http')) { + console.error("āŒ Invalid URL format. URL must start with http or https"); + process.exit(1); + } + + // Extract service name from URL + const urlMatch = instanceUrl.match(/https?:\/\/([^.]+)\.klavis\.ai/); + if (!urlMatch) { + console.error("āŒ Invalid URL format. Expected pattern: https://SERVICE-mcp-server.klavis.ai/"); + process.exit(1); + } + + // Check if URL is from Klavis AI + if (!instanceUrl.includes('klavis.ai')) { + console.error("āŒ Only Klavis AI MCP servers can be added with this tool"); + process.exit(1); + } + + service = urlMatch[1].toLowerCase(); + } else if (action === "remove") { + // For remove command, expect service name + service = input.toLowerCase(); + } else if (action === "list") { + // For list command, no additional input needed + } else if (action === "clear") { + // For clear command, no additional input needed + } + + const settingsPath = getSettingsPath(); + let settings = {}; + + // Read existing settings or create fresh + if (fs.existsSync(settingsPath)) { + try { + const rawData = fs.readFileSync(settingsPath, "utf-8"); + // Fix common JSON issues like trailing commas + const cleanedData = rawData + .replace(/,(\s*[}\]])/g, '$1') // Remove trailing commas + .replace(/([{,]\s*)([a-zA-Z_$][a-zA-Z0-9_$]*)\s*:/g, '$1"$2":'); // Quote unquoted keys + + settings = JSON.parse(cleanedData); + } catch (e) { + console.error("āŒ Error reading existing settings:", e.message); + console.error("šŸ’” Try fixing JSON syntax in:", settingsPath); + process.exit(1); + } + } + + settings.mcpServers = settings.mcpServers || {}; + + // Handle add, remove, or list action + if (action === "list") { + // List only Klavis AI MCP servers + const mcpServers = settings.mcpServers || {}; + const klavisServerNames = Object.keys(mcpServers).filter(name => + isKlavisAiService(mcpServers[name]) + ); + + if (klavisServerNames.length === 0) { + console.log("šŸ“‹ No Klavis AI MCP servers configured in Gemini settings"); + console.log(`šŸ’” Add a server with: klavis gemini add `); + return; + } + + console.log("šŸ“‹ Available Klavis AI MCP Servers:"); + console.log("==================================="); + + klavisServerNames.forEach((name, index) => { + console.log(`${index + 1}. ${name}`); + }); + + console.log(`\nTotal: ${klavisServerNames.length} Klavis AI MCP server(s) configured`); + return; + + } else if (action === "clear") { + // Clear only Klavis AI MCP servers + const mcpServers = settings.mcpServers || {}; + const klavisServerNames = Object.keys(mcpServers).filter(name => + isKlavisAiService(mcpServers[name]) + ); + + if (klavisServerNames.length === 0) { + console.log("šŸ“‹ No Klavis AI MCP servers to clear - configuration is already empty"); + return; + } + + // Backup before clearing + createBackup(settingsPath); + + // Remove only Klavis AI MCP servers + klavisServerNames.forEach(name => { + delete settings.mcpServers[name]; + }); + + fs.writeFileSync(settingsPath, JSON.stringify(settings, null, 2), "utf-8"); + console.log(`āœ… Cleared ${klavisServerNames.length} Klavis AI MCP server(s) from Gemini settings at ${settingsPath}`); + return; + + } else if (action === "add") { + // Backup before saving + createBackup(settingsPath); + + // Add new MCP service + settings.mcpServers[service] = { + command: "npx", + args: ["mcp-remote", instanceUrl] + }; + + fs.writeFileSync(settingsPath, JSON.stringify(settings, null, 2), "utf-8"); + console.log(`āœ… Added ${service} MCP to Gemini settings at ${settingsPath}`); + + } else if (action === "remove") { + // Check if service exists + if (!settings.mcpServers || !settings.mcpServers[service]) { + console.error(`āŒ Service '${service}' not found in Gemini settings`); + process.exit(1); + } + + // Check if service is from Klavis AI + if (!isKlavisAiService(settings.mcpServers[service])) { + console.error(`āŒ Service '${service}' is not a Klavis AI service and cannot be removed with this tool`); + process.exit(1); + } + + // Backup before saving + createBackup(settingsPath); + + // Remove MCP service + delete settings.mcpServers[service]; + + fs.writeFileSync(settingsPath, JSON.stringify(settings, null, 2), "utf-8"); + console.log(`āœ… Removed ${service} MCP from Gemini settings at ${settingsPath}`); + } +} + +main(); diff --git a/examples/google-gemini-cli/package.json b/examples/google-gemini-cli/package.json new file mode 100644 index 00000000..dd6da89c --- /dev/null +++ b/examples/google-gemini-cli/package.json @@ -0,0 +1,18 @@ +{ + "name": "klavis", + "version": "1.0.0", + "description": "CLI tool for managing Klavis AI MCP servers in Google Gemini CLI", + "keywords": ["mcp", "gemini", "cli", "automation", "server", "config"], + "author": "klavis.ai", + "license": "MIT", + "bin": { + "klavis": "./index.js" + }, + "engines": { + "node": ">=14.0.0" + }, + "files": [ + "index.js", + "README.md" + ] +} diff --git a/examples/google-genai/Use_Klavis_with_Gemini.ipynb b/examples/google-genai/Use_Klavis_with_Gemini.ipynb new file mode 100644 index 00000000..eff15ea4 --- /dev/null +++ b/examples/google-genai/Use_Klavis_with_Gemini.ipynb @@ -0,0 +1,397 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/klavis-ai/klavis/blob/main/examples/gemini/Use_Klavis_with_Gemini.ipynb)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "# Gemini + Klavis AI Integration\n", + "\n", + "This tutorial demonstrates how to use Google's Gemini with function calling with Klavis MCP (Model Context Protocol) servers.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Prerequisites\n", + "\n", + "- **Gemini API key** - Get at [ai.google.dev](https://ai.google.dev/)\n", + "- **Klavis API key** - Get at [klavis.ai](https://klavis.ai/)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# Install the required packages\n", + "%pip install -qU google-generativeai klavis" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import webbrowser\n", + "from google import genai\n", + "from google.genai import types\n", + "from klavis import Klavis\n", + "from klavis.types import McpServerName, ToolFormat\n", + "\n", + "# Set environment variables (you can also use .env file)\n", + "os.environ[\"GEMINI_API_KEY\"] = \"YOUR_GEMINI_API_KEY\" # Replace with your actual Gemini API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"YOUR_KLAVIS_API_KEY\" # Replace with your actual Klavis API key\n", + "\n", + "# Initialize clients\n", + "gemini_client = genai.Client(api_key=os.getenv(\"GEMINI_API_KEY\"))\n", + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Case Study 1 : Gemini + YouTube MCP Server\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 1 - Create YouTube MCP Server using Klavis\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udd17 YouTube MCP server created at: https://youtube-mcp-server.klavis.ai/mcp/?instance_id=1910fcd2-426a-4e67-afbe-39234e044db9, and the instance id is 1910fcd2-426a-4e67-afbe-39234e044db9\n" + ] + } + ], + "source": [ + "youtube_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.YOUTUBE,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "print(f\"\ud83d\udd17 YouTube MCP server created at: {youtube_mcp_instance.server_url}, and the instance id is {youtube_mcp_instance.instance_id}\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 2 - Create general method to use MCP Server with Gemini\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "def gemini_with_mcp_server(mcp_server_url: str, user_query: str):\n", + " # Get tools from MCP server\n", + " mcp_server_tools = klavis_client.mcp_server.list_tools(\n", + " server_url=mcp_server_url,\n", + " format=ToolFormat.GEMINI,\n", + " )\n", + " print(f\"\ud83d\udce6 Available tools: {mcp_server_tools}\")\n", + " \n", + " # Prepare conversation contents\n", + " contents = [types.Content(role=\"user\", parts=[types.Part(text=user_query)])]\n", + " \n", + " # Generate response with function calling\n", + " response = gemini_client.models.generate_content(\n", + " model='gemini-1.5-pro',\n", + " contents=contents,\n", + " config=types.GenerateContentConfig(tools=mcp_server_tools.tools)\n", + " )\n", + " \n", + " if response.candidates and response.candidates[0].content.parts:\n", + " contents.append(response.candidates[0].content)\n", + " \n", + " # Check if there are function calls to execute\n", + " has_function_calls = False\n", + " for part in response.candidates[0].content.parts:\n", + " if hasattr(part, 'function_call') and part.function_call:\n", + " has_function_calls = True\n", + " print(f\"\ud83d\udd27 Calling function: {part.function_call.name}\")\n", + " \n", + " try:\n", + " # Execute tool call via Klavis\n", + " function_result = klavis_client.mcp_server.call_tools(\n", + " server_url=mcp_server_url,\n", + " tool_name=part.function_call.name,\n", + " tool_args=dict(part.function_call.args),\n", + " )\n", + " \n", + " # Create function response in the proper format\n", + " function_response = {'result': function_result.result}\n", + " \n", + " except Exception as e:\n", + " print(f\"Function call error: {e}\")\n", + " function_response = {'error': str(e)}\n", + " \n", + " # Add function response to conversation\n", + " function_response_part = types.Part.from_function_response(\n", + " name=part.function_call.name,\n", + " response=function_response,\n", + " )\n", + " function_response_content = types.Content(\n", + " role='tool', \n", + " parts=[function_response_part]\n", + " )\n", + " contents.append(function_response_content)\n", + " \n", + " if has_function_calls:\n", + " # Generate final response after function calls\n", + " final_response = gemini_client.models.generate_content(\n", + " model='gemini-1.5-pro',\n", + " contents=contents,\n", + " config=types.GenerateContentConfig(tools=mcp_server_tools.tools)\n", + " )\n", + " return final_response.text\n", + " else:\n", + " # No function calls, return original response\n", + " return response.text\n", + " else:\n", + " return \"No response generated.\"\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 3 - Summarize your favorite video!\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udce6 Available tools: success=True tools=[{'function_declarations': [{'name': 'get_youtube_video_transcript', 'description': \"Retrieve the transcript or video details for a given YouTube video. The 'start' time in the transcript is formatted as MM:SS or HH:MM:SS.\", 'parameters': {'type': 'object', 'properties': {'url': {'type': 'string', 'description': 'The URL of the YouTube video to retrieve the transcript/subtitles for. (e.g. https://www.youtube.com/watch?v=dQw4w9WgXcQ)', 'items': None}}, 'required': ['url']}}]}] format= error=None\n", + "\ud83d\udd27 Calling function: get_youtube_video_transcript\n", + "Andrej Karpathy, former director of AI at Tesla, discusses the evolution of software and the impact of large language models (LLMs).\n", + "\n", + "**Software 1.0, 2.0, and 3.0:** Karpathy describes three paradigms of software development: Software 1.0 involves writing explicit code for computers, while Software 2.0 uses neural networks, where the code is the network's weights, learned through optimization. Software 3.0, the latest paradigm, utilizes natural language prompts to program LLMs. He draws parallels between Hugging Face and GitHub, highlighting how Hugging Face acts as a repository for Software 2.0, similar to GitHub's role for Software 1.0.\n", + "\n", + "**LLMs as Operating Systems:** Karpathy argues that LLMs are not merely utilities like electricity, but complex operating systems. He likens LLM labs (OpenAI, Gemini, etc.) to utility providers, building the infrastructure (the grid), and charging for metered access. He also points out that LLMs share characteristics with fabs, requiring substantial capital expenditure and possessing rapidly evolving technology trees. He draws parallels to the operating system landscape, with closed-source providers like Windows and Mac OS and open-source alternatives like Linux, comparing them to the current state of LLMs, where a few companies control access and open-source models like Llama are emerging. He emphasizes the importance of fluency in all three software paradigms for those entering the industry, due to the unique advantages and disadvantages of each. Karpathy envisions LLMs as the CPU, context windows as memory, and prompts as instructions in this new operating system. He further notes that LLM apps, like Cursor and Perplexity, resemble traditional apps running on different operating systems, and he suggests we are in a stage similar to the 1960s of computing, with centralized, time-shared access being the norm due to cost.\n", + "\n", + "**The Psychology of LLMs:** Karpathy describes LLMs as \"stochastic simulations of people,\" possessing encyclopedic knowledge and memory, similar to an autistic savant. However, they also exhibit cognitive deficits, including hallucinations, jagged intelligence (superhuman in some areas, subhuman in others), and a lack of self-knowledge. He points to their susceptibility to prompt injection and data leaks as security concerns. He recommends watching the movies \"Rainman\", \"Memento\", and \"51st Dates\" to better understand the memory and knowledge retention characteristics of LLMs. These limitations require carefully crafted prompts and a balanced approach to utilizing their strengths while mitigating their weaknesses.\n", + "\n", + "**Opportunities and Challenges:** Karpathy advocates for \"partial autonomy apps\" that leverage LLMs while maintaining human oversight. He emphasizes the importance of fast generation-verification loops, aided by GUIs and visual representations. He cautions against overreliance on AI agents, stressing the need to keep them \"on the leash\" due to their fallibility. He draws an analogy to his experience at Tesla, where initial success with self-driving cars in 2013 led to overly optimistic predictions. The lesson, he argues, is that software development, like driving, is complex, and complete autonomy will take time. He recommends focusing on partial autonomy products with user-friendly interfaces and adjustable autonomy sliders, allowing for increased automation as the technology matures. He encourages developers to consider how to give LLMs access to the information humans can see and the actions they can take, and to allow humans to effectively supervise their work. He recommends thinking of this as building Iron Man suits (augmentation) rather than Iron Man robots (full autonomy) for now.\n", + "\n", + "**Vibe Coding and Agent-Based Development:** Karpathy discusses the democratization of programming through natural language, citing the popularity of \"vibe coding,\" where users, even without formal coding experience, can describe what they want to create, allowing LLMs to generate code based on the prompt. This natural language interface opens up software development to a much wider audience. He showcases his own experience with vibe coding an iOS app and the \"Menu Genen\" app. He highlights that generating code is now the easy part, while the difficulty lies in deployment and Dev Ops tasks. He also suggests the use of simple, LLM-friendly formats for documentation, such as markdown or protocols like the Model Context Protocol, to facilitate better interaction with these models. He mentions using natural language prompts to utilize documentation like the Manim animation library. He proposes ideas like `llm.txt` (analogous to `robots.txt`) for websites to communicate directly with LLMs. He also points out that GUIs are not agent-friendly and much current documentation is for humans, not LLMs, requiring a shift in how interfaces and information are presented.\n", + "\n", + "Karpathy concludes with an optimistic outlook on the future of software development, emphasizing the need for both human-driven coding and agent-based development, and the exciting possibilities that arise from the combination of human creativity and AI capabilities.\n", + "\n" + ] + } + ], + "source": [ + "YOUTUBE_VIDEO_URL = \"https://www.youtube.com/watch?v=LCEmiRjPEtQ\" # pick a video you like!\n", + "\n", + "result = gemini_with_mcp_server(\n", + " mcp_server_url=youtube_mcp_instance.server_url, \n", + " user_query=f\"Please provide a complete summary of this YouTube video with timestamp: {YOUTUBE_VIDEO_URL}\"\n", + ")\n", + "\n", + "print(result)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "\u2705 Great! You've successfully created an AI agent that uses Gemini's function calling with Klavis MCP servers to summarize YouTube videos!\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Case Study 2 : Gemini + Gmail MCP Server (OAuth needed)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Create Gmail MCP server instance\n", + "gmail_mcp_server = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.GMAIL,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "webbrowser.open(gmail_mcp_server.oauth_url)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udce6 Available tools: success=True tools=[{'function_declarations': [{'name': 'send_email', 'description': 'Sends a new email', 'parameters': {'type': 'object', 'properties': {'to': {'type': 'array', 'description': 'List of recipient email addresses', 'items': {'type': 'string'}}, 'subject': {'type': 'string', 'description': 'Email subject', 'items': None}, 'body': {'type': 'string', 'description': 'Email body content (used for text/plain or when htmlBody not provided)', 'items': None}, 'htmlBody': {'type': 'string', 'description': 'HTML version of the email body', 'items': None}, 'mimeType': {'type': 'string', 'description': 'Email content type', 'items': None}, 'cc': {'type': 'array', 'description': 'List of CC recipients', 'items': {'type': 'string'}}, 'bcc': {'type': 'array', 'description': 'List of BCC recipients', 'items': {'type': 'string'}}, 'threadId': {'type': 'string', 'description': 'Thread ID to reply to', 'items': None}, 'inReplyTo': {'type': 'string', 'description': 'Message ID being replied to', 'items': None}}, 'required': ['to', 'subject', 'body']}}, {'name': 'draft_email', 'description': 'Draft a new email', 'parameters': {'type': 'object', 'properties': {'to': {'type': 'array', 'description': 'List of recipient email addresses', 'items': {'type': 'string'}}, 'subject': {'type': 'string', 'description': 'Email subject', 'items': None}, 'body': {'type': 'string', 'description': 'Email body content (used for text/plain or when htmlBody not provided)', 'items': None}, 'htmlBody': {'type': 'string', 'description': 'HTML version of the email body', 'items': None}, 'mimeType': {'type': 'string', 'description': 'Email content type', 'items': None}, 'cc': {'type': 'array', 'description': 'List of CC recipients', 'items': {'type': 'string'}}, 'bcc': {'type': 'array', 'description': 'List of BCC recipients', 'items': {'type': 'string'}}, 'threadId': {'type': 'string', 'description': 'Thread ID to reply to', 'items': None}, 'inReplyTo': {'type': 'string', 'description': 'Message ID being replied to', 'items': None}}, 'required': ['to', 'subject', 'body']}}, {'name': 'read_email', 'description': 'Retrieves the content of a specific email', 'parameters': {'type': 'object', 'properties': {'messageId': {'type': 'string', 'description': 'ID of the email message to retrieve', 'items': None}}, 'required': ['messageId']}}, {'name': 'search_emails', 'description': 'Searches for emails using Gmail search syntax', 'parameters': {'type': 'object', 'properties': {'query': {'type': 'string', 'description': \"Gmail search query (e.g., 'from:example@gmail.com')\", 'items': None}, 'maxResults': {'type': 'number', 'description': 'Maximum number of results to return', 'items': None}}, 'required': ['query']}}, {'name': 'modify_email', 'description': 'Modifies email labels (move to different folders)', 'parameters': {'type': 'object', 'properties': {'messageId': {'type': 'string', 'description': 'ID of the email message to modify', 'items': None}, 'addLabelIds': {'type': 'array', 'description': 'List of label IDs to add to the message', 'items': {'type': 'string'}}, 'removeLabelIds': {'type': 'array', 'description': 'List of label IDs to remove from the message', 'items': {'type': 'string'}}}, 'required': ['messageId']}}, {'name': 'delete_email', 'description': 'Permanently deletes an email', 'parameters': {'type': 'object', 'properties': {'messageId': {'type': 'string', 'description': 'ID of the email message to delete', 'items': None}}, 'required': ['messageId']}}, {'name': 'batch_modify_emails', 'description': 'Modifies labels for multiple emails in batches', 'parameters': {'type': 'object', 'properties': {'messageIds': {'type': 'array', 'description': 'List of message IDs to modify', 'items': {'type': 'string'}}, 'addLabelIds': {'type': 'array', 'description': 'List of label IDs to add to all messages', 'items': {'type': 'string'}}, 'removeLabelIds': {'type': 'array', 'description': 'List of label IDs to remove from all messages', 'items': {'type': 'string'}}, 'batchSize': {'type': 'number', 'description': 'Number of messages to process in each batch (default: 50)', 'items': None}}, 'required': ['messageIds']}}, {'name': 'batch_delete_emails', 'description': 'Permanently deletes multiple emails in batches', 'parameters': {'type': 'object', 'properties': {'messageIds': {'type': 'array', 'description': 'List of message IDs to delete', 'items': {'type': 'string'}}, 'batchSize': {'type': 'number', 'description': 'Number of messages to process in each batch (default: 50)', 'items': None}}, 'required': ['messageIds']}}]}] format= error=None\n", + "\ud83d\udd27 Calling function: send_email\n", + "OK. I've sent the email.\n", + "\n" + ] + } + ], + "source": [ + "EMAIL_RECIPIENT = \"zihaolin@klavis.ai\" # Replace with your email\n", + "EMAIL_SUBJECT = \"Test Gemini + Gmail MCP Server\"\n", + "EMAIL_BODY = \"Hello World from Gemini!\"\n", + "\n", + "result = gemini_with_mcp_server(\n", + " mcp_server_url=gmail_mcp_server.server_url, \n", + " user_query=f\"Please send an email to {EMAIL_RECIPIENT} with subject {EMAIL_SUBJECT} and body {EMAIL_BODY}\"\n", + ")\n", + "\n", + "print(result)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Summary\n", + "\n", + "This tutorial demonstrated how to integrate Google's Gemini with function calling capabilities with Klavis MCP servers to create powerful AI applications. We covered practical examples and interactive features:\n", + "\n", + "**\ud83c\udfa5 YouTube Integration**: Built an AI assistant that can automatically summarize YouTube videos by extracting transcripts and providing detailed, timestamped summaries.\n", + "\n", + "**\ud83d\udce7 Gmail Integration**: Created an AI-powered email assistant that can send emails through Gmail with OAuth authentication.\n", + "\n", + "**\ud83d\udcac Interactive Chat**: Added multi-turn conversation capabilities that maintain context across interactions.\n", + "\n", + "### Key Takeaways:\n", + "- **Modern API**: Uses the latest `google-genai` library with improved type safety and performance\n", + "- **Easy Setup**: Klavis MCP servers can be created with just a few lines of code\n", + "- **Robust Function Calling**: Better error handling and response management\n", + "- **Conversation Context**: Maintains state across multiple interactions\n", + "- **Versatile**: Support for both simple APIs (YouTube) and OAuth-authenticated services (Gmail)\n", + "- **Scalable**: The same pattern can be applied to any of the MCP servers available in Klavis\n", + "- **Developer Friendly**: Enhanced logging and debugging capabilities\n", + "\n", + "### Next Steps:\n", + "- Try different MCP servers from Klavis (Notion, Slack, Airtable, etc.)\n", + "- Experiment with multi-modal capabilities using images and files\n", + "- Build more complex workflows with multiple function calls\n", + "- Integrate with your own applications and use cases\n", + "\n", + "**Happy building!** \ud83d\ude80\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/examples/google-genai/python/.env.example b/examples/google-genai/python/.env.example new file mode 100644 index 00000000..0717201c --- /dev/null +++ b/examples/google-genai/python/.env.example @@ -0,0 +1,2 @@ +GEMINI_API_KEY=your_google_api_key_here +KLAVIS_API_KEY=your_klavis_api_key_here \ No newline at end of file diff --git a/examples/google-genai/python/main.py b/examples/google-genai/python/main.py new file mode 100644 index 00000000..47f45bb8 --- /dev/null +++ b/examples/google-genai/python/main.py @@ -0,0 +1,134 @@ +import os +import webbrowser +from google import genai +from google.genai import types + +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat + +try: + from dotenv import load_dotenv + load_dotenv() +except ImportError: + pass + +def main(): + # Get API keys + gemini_api_key = os.getenv("GEMINI_API_KEY") + klavis_api_key = os.getenv("KLAVIS_API_KEY") + + if not gemini_api_key or not klavis_api_key: + print("Error: GEMINI_API_KEY or KLAVIS_API_KEY environment variable is not set") + return + + # Initialize clients + gemini_client = genai.Client(api_key=gemini_api_key) + klavis_client = Klavis(api_key=klavis_api_key) + + # Create MCP server instance + print("šŸ”§ Creating MCP server instance...") + mcp_instance = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.NOTION, + user_id="1234") + print("--- mcp_instance --- \n", mcp_instance) + + # Handle OAuth if needed + if hasattr(mcp_instance, 'oauth_url') and mcp_instance.oauth_url: + webbrowser.open(mcp_instance.oauth_url) + print(f"šŸ” Opening OAuth authorization: {mcp_instance.oauth_url}") + print("Please complete the OAuth authorization in your browser...") + input("Press Enter after completing OAuth authorization...") + + # Get tools from Klavis + mcp_tools = klavis_client.mcp_server.list_tools( + server_url=mcp_instance.server_url, + format=ToolFormat.GEMINI + ) + + contents = [] + + # Chat loop + while True: + try: + user_input = input("šŸ‘¤ You: ").strip() + + if user_input.lower() in ['quit', 'exit', 'q']: + break + + if not user_input: + continue + + contents.append(types.Content(role="user", parts=[types.Part(text=user_input)])) + + response = gemini_client.models.generate_content( + model='gemini-1.5-pro', + contents=contents, + config=types.GenerateContentConfig(tools=mcp_tools.tools) + ) + + if response.candidates and response.candidates[0].content.parts: + contents.append(response.candidates[0].content) + + # Check if there are function calls to execute + has_function_calls = False + for part in response.candidates[0].content.parts: + if hasattr(part, 'function_call') and part.function_call: + has_function_calls = True + print(f"\nšŸ”§ Calling function: {part.function_call.name}") + + try: + # Execute tool call via Klavis + function_result = klavis_client.mcp_server.call_tools( + server_url=mcp_instance.server_url, + tool_name=part.function_call.name, + tool_args=dict(part.function_call.args) + ) + + # Create function response in the proper format + function_response = {'result': function_result.result} + + except Exception as e: + print(f"Function call error: {e}") + function_response = {'error': str(e)} + + function_response_part = types.Part.from_function_response( + name=part.function_call.name, + response=function_response + ) + function_response_content = types.Content( + role='tool', + parts=[function_response_part] + ) + contents.append(function_response_content) + + if has_function_calls: + final_response = gemini_client.models.generate_content( + model='gemini-1.5-pro', + contents=contents, + config=types.GenerateContentConfig(tools=mcp_tools.tools) + ) + + # Add final response to conversation history + contents.append(final_response.candidates[0].content) + print(f"šŸ¤– Assistant: {final_response.text}") + else: + # No function calls, just display the response + print(f"šŸ¤– Assistant: {response.text}") + contents.append(response.candidates[0].content) + else: + # No response content, handle gracefully + print("No response generated.") + + print() # Add spacing + + except KeyboardInterrupt: + print("\n\nšŸ‘‹ Goodbye!") + break + except EOFError: + print("\n\nšŸ‘‹ Goodbye!") + break + except Exception as e: + print(f"\nāŒ Error: {e}") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/google-genai/python/requirements.txt b/examples/google-genai/python/requirements.txt new file mode 100644 index 00000000..275b4205 --- /dev/null +++ b/examples/google-genai/python/requirements.txt @@ -0,0 +1,3 @@ +google-genai>=1.24.0 +klavis>=1.2.1 +python-dotenv>=1.1.1 \ No newline at end of file diff --git a/examples/google-genai/typescript/.env.example b/examples/google-genai/typescript/.env.example new file mode 100644 index 00000000..0717201c --- /dev/null +++ b/examples/google-genai/typescript/.env.example @@ -0,0 +1,2 @@ +GEMINI_API_KEY=your_google_api_key_here +KLAVIS_API_KEY=your_klavis_api_key_here \ No newline at end of file diff --git a/examples/google-genai/typescript/main.ts b/examples/google-genai/typescript/main.ts new file mode 100644 index 00000000..61595e45 --- /dev/null +++ b/examples/google-genai/typescript/main.ts @@ -0,0 +1,194 @@ +import * as dotenv from 'dotenv'; +import { GoogleGenAI, ToolListUnion } from '@google/genai'; +import { KlavisClient, Klavis } from 'klavis'; +import open from 'open'; +import * as readline from 'readline'; + +// Load environment variables +dotenv.config(); + +const geminiApiKey = process.env.GEMINI_API_KEY; +const klavisApiKey = process.env.KLAVIS_API_KEY; + +if (!geminiApiKey) { + throw new Error('GEMINI_API_KEY is not set in the environment variables.'); +} +if (!klavisApiKey) { + throw new Error('KLAVIS_API_KEY is not set in the environment variables.'); +} + +const geminiClient = new GoogleGenAI({ apiKey: geminiApiKey }); +const klavisClient = new KlavisClient({ apiKey: klavisApiKey }); + +// Create readline interface for user input +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout +}); + +// Helper function to prompt user input +function promptUser(question: string): Promise { + return new Promise((resolve) => { + rl.question(question, (answer: string) => { + resolve(answer); + }); + }); +} + +async function main() { + try { + // Create MCP server instance + const mcpInstance = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Notion, + userId: "1234"}); + + console.log("--- mcp_instance ---", mcpInstance); + + // Handle OAuth if needed + if (mcpInstance.oauthUrl) { + console.log(`šŸ” Opening OAuth authorization: ${mcpInstance.oauthUrl}`); + await open(mcpInstance.oauthUrl); + console.log("Please complete the OAuth authorization in your browser..."); + await promptUser("Press Enter after completing OAuth authorization..."); + } + + // Get tools from Klavis + const mcpTools = await klavisClient.mcpServer.listTools({ + serverUrl: mcpInstance.serverUrl, + format: Klavis.ToolFormat.Gemini + }); + + const contents: any[] = []; + + // Extract function declarations from the Klavis response + const gemini_tools = mcpTools.tools as ToolListUnion; + const functionDeclarations = (gemini_tools[0] as any)?.function_declarations || []; + + console.log(`āœ… Loaded ${functionDeclarations.length} function declarations`); + + // Chat loop + while (true) { + try { + const userInput = await promptUser("šŸ‘¤ You: "); + + if (userInput.toLowerCase().trim() === 'quit' || + userInput.toLowerCase().trim() === 'exit' || + userInput.toLowerCase().trim() === 'q') { + break; + } + + if (!userInput.trim()) { + continue; + } + + contents.push({ + role: "user", + parts: [{ text: userInput }] + }); + + const response = await geminiClient.models.generateContent({ + model: 'gemini-2.5-flash', + contents: contents, + config: { + tools: [{ + functionDeclarations: functionDeclarations + }] + } + }); + + if (!response.candidates || !response.candidates[0]?.content?.parts) { + console.log("No response generated."); + continue; + } + + contents.push(response.candidates[0].content); + + // Check for function calls in the response + let hasFunctionCalls = false; + const functionCallResults: any[] = []; + + // Check if response has functionCalls property + if (response.functionCalls && response.functionCalls.length > 0) { + hasFunctionCalls = true; + for (const functionCall of response.functionCalls) { + console.log(`\nšŸ”§ Calling function: ${functionCall.name}`); + + try { + // Execute tool call via Klavis + const functionResult = await klavisClient.mcpServer.callTools({ + serverUrl: mcpInstance.serverUrl, + toolName: functionCall.name || '', + toolArgs: functionCall.args || {} + }); + + functionCallResults.push({ + functionResponse: { + name: functionCall.name, + response: functionResult.result + } + }); + } catch (error) { + console.error(`āŒ Function call error: ${error}`); + functionCallResults.push({ + functionResponse: { + name: functionCall.name, + response: { error: String(error) } + } + }); + } + } + } + + // If there were function calls, add the results and get final response + if (hasFunctionCalls && functionCallResults.length > 0) { + // Add function responses to conversation history + contents.push({ + role: 'tool', + parts: functionCallResults + }); + + // Get final response after function execution + const finalResponse = await geminiClient.models.generateContent({ + model: 'gemini-2.5-flash', + contents: contents, + config: { + tools: [{ + functionDeclarations: functionDeclarations + }], + temperature: 0 + } + }); + + // Add final response to conversation history + if (finalResponse.candidates && finalResponse.candidates[0]?.content) { + contents.push(finalResponse.candidates[0].content); + } + + console.log(`\nšŸ¤– Assistant: ${finalResponse.text || 'No response text'}`); + } else { + // No function calls, just display the response + console.log(`\nšŸ¤– Assistant: ${response.text || 'No response text'}`); + } + + } catch (error) { + console.error(`\nāŒ Error: ${error}`); + if (error instanceof Error) { + console.error(`Stack trace: ${error.stack}`); + } + } + } + + console.log("\n\nšŸ‘‹ Goodbye!"); + + } catch (error) { + console.error("āŒ Demo failed:", error); + if (error instanceof Error) { + console.error(`Stack trace: ${error.stack}`); + } + process.exit(1); + } finally { + rl.close(); + } +} + +main(); \ No newline at end of file diff --git a/examples/google-genai/typescript/package-lock.json b/examples/google-genai/typescript/package-lock.json new file mode 100644 index 00000000..e30c33b3 --- /dev/null +++ b/examples/google-genai/typescript/package-lock.json @@ -0,0 +1,1150 @@ +{ + "name": "gemini-example-typescript", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "gemini-example-typescript", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "@google/genai": "^1.8.0", + "dotenv": "^16.4.5", + "klavis": "^0.3.1", + "open": "^8.4.0" + }, + "devDependencies": { + "@types/node": "^20.19.0", + "ts-node": "^10.9.2", + "typescript": "^5.5.3" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@google/genai": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@google/genai/-/genai-1.8.0.tgz", + "integrity": "sha512-n3KiMFesQCy2R9iSdBIuJ0JWYQ1HZBJJkmt4PPZMGZKvlgHhBAGw1kUMyX+vsAIzprN3lK45DI755lm70wPOOg==", + "license": "Apache-2.0", + "dependencies": { + "google-auth-library": "^9.14.2", + "ws": "^8.18.0", + "zod": "^3.22.4", + "zod-to-json-schema": "^3.22.4" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "@modelcontextprotocol/sdk": "^1.11.0" + }, + "peerDependenciesMeta": { + "@modelcontextprotocol/sdk": { + "optional": true + } + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz", + "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", + "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "20.19.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.6.tgz", + "integrity": "sha512-uYssdp9z5zH5GQ0L4zEJ2ZuavYsJwkozjiUzCRfGtaaQcyjAMJ34aP8idv61QlqTozu6kudyr6JMq9Chf09dfA==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "license": "MIT", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/agent-base": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", + "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "license": "MIT" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/bignumber.js": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.3.0.tgz", + "integrity": "sha512-EM7aMFTXbptt/wZdMlBv2t8IViwQL+h6SLHosp8Yf0dqJMTnY6iL32opnAB6kAdL0SZPuvcAzFr31o0c/R3/RA==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", + "license": "BSD-3-Clause" + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/define-lazy-prop": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "license": "Apache-2.0", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "license": "MIT", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "license": "MIT" + }, + "node_modules/form-data": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/formdata-node": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-6.0.3.tgz", + "integrity": "sha512-8e1++BCiTzUno9v5IZ2J6bv4RU+3UKDmqWUQD0MIMVCd9AdhWkO1gw57oo1mNEX1dMq2EGI+FbWz4B92pscSQg==", + "license": "MIT", + "engines": { + "node": ">= 18" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gaxios": { + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz", + "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==", + "license": "Apache-2.0", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/gcp-metadata": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.1.tgz", + "integrity": "sha512-a4tiq7E0/5fTjxPAaH4jpjkSv/uCaU2p5KC6HVGrvl0cDjA8iBZv4vv1gyzlmK0ZUKqwpOyQMKzZQe3lTit77A==", + "license": "Apache-2.0", + "dependencies": { + "gaxios": "^6.1.1", + "google-logging-utils": "^0.0.2", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/google-auth-library": { + "version": "9.15.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.1.tgz", + "integrity": "sha512-Jb6Z0+nvECVz+2lzSMt9u98UsoakXxA2HGHMCxh+so3n90XgYWkq5dur19JAJV7ONiJY22yBTyJB1TSkvPq9Ng==", + "license": "Apache-2.0", + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^6.1.1", + "gcp-metadata": "^6.1.0", + "gtoken": "^7.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/google-logging-utils": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/google-logging-utils/-/google-logging-utils-0.0.2.tgz", + "integrity": "sha512-NEgUnEcBiP5HrPzufUkBzJOD/Sxsco3rLNo1F1TNf7ieU8ryUzBhqba8r756CjLX7rn3fHl6iLEwPYuqpoKgQQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gtoken": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", + "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", + "license": "MIT", + "dependencies": { + "gaxios": "^6.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "license": "MIT", + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/js-base64": { + "version": "3.7.7", + "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-3.7.7.tgz", + "integrity": "sha512-7rCnleh0z2CkXhH67J8K1Ytz0b2Y+yxTPL+/KOJoa20hfnVQ/3/T6W/KflYI4bRHRagNeXeU2bkNGI3v1oS/lw==", + "license": "BSD-3-Clause" + }, + "node_modules/json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "license": "MIT", + "dependencies": { + "bignumber.js": "^9.0.0" + } + }, + "node_modules/jwa": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.1.tgz", + "integrity": "sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg==", + "license": "MIT", + "dependencies": { + "buffer-equal-constant-time": "^1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "license": "MIT", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/klavis": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/klavis/-/klavis-0.3.1.tgz", + "integrity": "sha512-t+pn4gt8vSSuAfU3LpoWSYHGLtAbQKPvDkFpUpzJs/3iqPzDTnJPoIfvwnQDZz/lJxcnszF7rLnOIX228QmpJw==", + "dependencies": { + "form-data": "^4.0.0", + "formdata-node": "^6.0.3", + "js-base64": "3.7.7", + "node-fetch": "^2.7.0", + "qs": "^6.13.1", + "readable-stream": "^4.5.2", + "url-join": "4.0.1" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/open": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", + "license": "MIT", + "dependencies": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", + "license": "MIT", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/readable-stream": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", + "integrity": "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==", + "license": "MIT", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" + }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/url-join": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz", + "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==", + "license": "MIT" + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "license": "MIT" + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/ws": { + "version": "8.18.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", + "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/zod": { + "version": "3.25.76", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", + "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.24.6", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz", + "integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.24.1" + } + } + } +} diff --git a/examples/google-genai/typescript/package.json b/examples/google-genai/typescript/package.json new file mode 100644 index 00000000..9c79ff24 --- /dev/null +++ b/examples/google-genai/typescript/package.json @@ -0,0 +1,26 @@ +{ + "name": "gemini-example-typescript", + "version": "1.0.0", + "description": "TypeScript example for Google GenAI with Klavis", + "main": "main.js", + "scripts": { + "start": "ts-node main.ts", + "demo": "ts-node demo.ts", + "build": "tsc", + "debug": "node --inspect-brk -r ts-node/register main.ts", + "debug:compiled": "tsc && node --inspect-brk dist/main.js" + }, + "author": "", + "license": "ISC", + "dependencies": { + "@google/genai": "^1.8.0", + "dotenv": "^16.4.5", + "klavis": "^0.3.1", + "open": "^8.4.0" + }, + "devDependencies": { + "@types/node": "^20.19.0", + "ts-node": "^10.9.2", + "typescript": "^5.5.3" + } +} \ No newline at end of file diff --git a/examples/google-genai/typescript/tsconfig.json b/examples/google-genai/typescript/tsconfig.json new file mode 100644 index 00000000..51b451e2 --- /dev/null +++ b/examples/google-genai/typescript/tsconfig.json @@ -0,0 +1,21 @@ +{ + "compilerOptions": { + "target": "es2020", + "module": "commonjs", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "outDir": "./dist", + "rootDir": "./", + "sourceMap": true, + "declaration": true, + "removeComments": false + }, + "include": [ + "*.ts" + ], + "exclude": [ + "node_modules" + ] +} \ No newline at end of file diff --git a/examples/google_adk/python/README.md b/examples/google_adk/python/README.md new file mode 100644 index 00000000..4d6ae4e3 --- /dev/null +++ b/examples/google_adk/python/README.md @@ -0,0 +1,35 @@ +# Google ADK with Klavis MCP Integration + +## Create an Agent Project + +Run the ADK create command to start a new agent project: + +```bash +adk create my_agent +``` + +## Project Structure + +The created agent project has the following structure: + +``` +my_agent/ + agent.py # main agent code + .env # API keys or project IDs + __init__.py +``` + +## Update Your Agent + +The `agent.py` file contains a `root_agent` definition which is the only required element of an ADK agent. + +Add MCP tools via Klavis by creating a Strata server and connecting it to the agent using `McpToolset` with `StreamableHTTPConnectionParams`. See [`agent.py`](my_agent/agent.py) for a complete example integrating Gmail and Slack MCP servers. + +## Run Your Agent + +```bash +adk web +``` + +This will launch a web interface to interact with your agent. + diff --git a/examples/google_adk/python/my_agent/.env.example b/examples/google_adk/python/my_agent/.env.example new file mode 100644 index 00000000..bd851459 --- /dev/null +++ b/examples/google_adk/python/my_agent/.env.example @@ -0,0 +1,3 @@ +KLAVIS_API_KEY=YOUR_KLAVIS_API_KEY +GOOGLE_GENAI_USE_VERTEXAI=0 +GOOGLE_API_KEY=YOUR_GOOGLE_API_KEY \ No newline at end of file diff --git a/examples/google_adk/python/my_agent/__init__.py b/examples/google_adk/python/my_agent/__init__.py new file mode 100644 index 00000000..02c597e1 --- /dev/null +++ b/examples/google_adk/python/my_agent/__init__.py @@ -0,0 +1 @@ +from . import agent diff --git a/examples/google_adk/python/my_agent/agent.py b/examples/google_adk/python/my_agent/agent.py new file mode 100644 index 00000000..4fb8049f --- /dev/null +++ b/examples/google_adk/python/my_agent/agent.py @@ -0,0 +1,54 @@ +import os +import webbrowser + +from google.adk.agents.llm_agent import Agent +from google.adk.tools.mcp_tool import StreamableHTTPConnectionParams +from google.adk.tools.mcp_tool.mcp_toolset import McpToolset +from klavis import Klavis +from klavis.types import McpServerName + +from dotenv import load_dotenv +load_dotenv() + +KLAVIS_API_KEY = os.getenv("KLAVIS_API_KEY") + +# Initialize Klavis and set up Strata server +klavis_client = Klavis(api_key=KLAVIS_API_KEY) + +user_id = "user_123" + +# Create Strata server with multiple MCP servers +strata_response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GMAIL, McpServerName.SLACK], + user_id=user_id +) + +# Handle OAuth authentication +if strata_response.oauth_urls: + for server_name, oauth_url in strata_response.oauth_urls.items(): + user_integration_auth = klavis_client.user.get_user_auth( + user_id=user_id, + server_name=server_name + ) + if not user_integration_auth.is_authenticated: + print(f"šŸ” Opening OAuth for {server_name}...") + webbrowser.open(oauth_url) + input(f"Press Enter after completing {server_name} OAuth authorization...") + +mcp_server_url = strata_response.strata_server_url + +# Create AI agent with MCP toolset (exposed at module level for ADK) +root_agent = Agent( + name="my_agent", + model="gemini-2.5-flash", + description="An agent with access to tools through Klavis MCP", + instruction="You are a helpful assistant with access to MCP tools.", + tools=[ + McpToolset( + connection_params=StreamableHTTPConnectionParams( + url=mcp_server_url, + ), + ) + ], +) + diff --git a/examples/google_adk/python/requirements.txt b/examples/google_adk/python/requirements.txt new file mode 100644 index 00000000..2a8545b9 --- /dev/null +++ b/examples/google_adk/python/requirements.txt @@ -0,0 +1,3 @@ +google-adk>=1.16.0 +klavis>=2.11.1 +python-dotenv>=1.1.0 \ No newline at end of file diff --git a/examples/llamaindex/Use_Klavis_with_LlamaIndex.ipynb b/examples/llamaindex/Use_Klavis_with_LlamaIndex.ipynb new file mode 100644 index 00000000..4d34c431 --- /dev/null +++ b/examples/llamaindex/Use_Klavis_with_LlamaIndex.ipynb @@ -0,0 +1,405 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/llamaindex/cookbook/blob/main/integrations/Klavis/Use_Klavis_with_LlamaIndex.ipynb)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "# LlamaIndex + Klavis AI Integration\n", + "\n", + "This tutorial demonstrates how to build AI agents using LlamaIndex's agent framework with Klavis MCP (Model Context Protocol) servers for enhanced functionality. \n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Prerequisites\n", + "\n", + "Before we begin, you'll need:\n", + "\n", + "- **OpenAI API key** - Get at [openai.com](https://openai.com/)\n", + "- **Klavis API key** - Get at [klavis.ai](https://klavis.ai/)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m25.0\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m25.1.1\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# Install the required packages\n", + "%pip install -qU llama-index llama-index-tools-mcp klavis" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "from klavis import Klavis\n", + "from klavis.types import McpServerName\n", + "from llama_index.llms.openai import OpenAI\n", + "from llama_index.tools.mcp import BasicMCPClient\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = \"YOUR_OPENAI_API_KEY\" # Replace with your actual OpenAI API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"YOUR_KLAVIS_API_KEY\" # Replace with your actual Klavis API key\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Case 1: YouTube AI Agent\n", + "\n", + "#### Create an AI agent to summarize YouTube videos using LlamaIndex and Klavis MCP Server." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Step 1 - using Klavis to create youtube MCP Server" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udd17 YouTube MCP server created at: https://youtube-mcp-server.klavis.ai/sse?instance_id=270cbd51-e737-407d-85ce-6e6162248671\n" + ] + } + ], + "source": [ + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))\n", + "\n", + "# Create a YouTube MCP server and get the server URL\n", + "youtube_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.YOUTUBE,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "youtube_mcp_server_url = youtube_mcp_instance.server_url\n", + "# print(f\"\ud83d\udd17 YouTube MCP server created at: {youtube_mcp_server_url}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Step 2 - using Llamaindex to create AI Agent with the MCP Server " + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.agent.workflow import FunctionAgent\n", + "from llama_index.tools.mcp import (\n", + " get_tools_from_mcp_url,\n", + " aget_tools_from_mcp_url,\n", + ")\n", + "\n", + "llm = OpenAI(model=\"gpt-4o-mini\", api_key=os.getenv(\"OPENAI_API_KEY\"))\n", + "\n", + "youtube_tools = await aget_tools_from_mcp_url(https://github.com/youtube_mcp_server_url,%20client=BasicMCPClient(youtube_mcp_server_url))\n", + "\n", + "youtube_agent = FunctionAgent(\n", + " name=\"youtube_agent\",\n", + " description=\"Agent using MCP-based tools\",\n", + " tools=youtube_tools,\n", + " llm=llm,\n", + " system_prompt=\"You are an AI assistant that uses MCP tools.\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Step 3 - Run your AI Agent to summarize your favorite video!" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The video titled \"Introducing AgentWorkflow, a way to easily create multi-agent systems in Llamaindex\" presents a new system called AgentWorkflow designed for building and orchestrating AI agent systems. It emphasizes the ability to coordinate multiple AI agents while maintaining state and context, making it suitable for both single specialized agents and teams working together.\n", + "\n", + "### Key Features:\n", + "- **Flexible Agent Types**: Includes FunctionAgent and ReActAgent.\n", + "- **Built-in State Management**: Helps in managing the state of agents effectively.\n", + "- **Real-time Monitoring**: Allows users to monitor the agents in action.\n", + "- **Human-in-the-loop Capabilities**: Facilitates human oversight in the agent processes.\n", + "\n", + "The video encourages viewers to explore comprehensive tutorials and documentation to learn how to build everything from simple assistants to complex multi-agent systems. \n", + "\n", + "### Additional Resources:\n", + "- Basic Tutorial: [Basic Tutorial](https://docs.llamaindex.ai/en/stable/examples/agent/agent_workflow_basic/)\n", + "- Full Documentation: [Full docs](https://docs.llamaindex.ai/en/stable/understanding/agent/multi_agents/)\n", + "- Introductory Blog Post: [Blog Post](https://www.llamaindex.ai/blog/introducing-agentworkflow-a-powerful-system-for-building-ai-agent-systems)\n", + "- Discord Community: [Join Discord](https://discord.com/invite/eN6D2HQ4aX)\n", + "\n", + "The video has a duration of approximately 16 minutes and 38 seconds and has garnered over 7,161 views.\n" + ] + } + ], + "source": [ + "YOUTUBE_VIDEO_URL = \"https://www.youtube.com/watch?v=MmiveeGxfX0&t=528s\" # pick a video you like!\n", + "\n", + "response = await youtube_agent.run(f\"Summarize this video: {YOUTUBE_VIDEO_URL}\")\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\u2705 Nice work! You\u2019ve successfully oursource your eyeball and summarized your favorite YouTube video!" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Case 2: Multi-Agent Workflow\n", + "\n", + "#### Build a LlamaIndex AgentWorkflow that summarizes YouTube videos and sends the summary via email." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Step 1 - using Klavis to create YouTube and Gmail MCP Servers" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u2705 Created YouTube and Gmail MCP instances\n", + "\ud83d\udd10 Opening OAuth authorization for Gmail, if you are not redirected, please open the following URL in your browser: https://api.klavis.ai/oauth/gmail/authorize?instance_id=d9d482b3-433a-4330-9a8b-9548c0b0a326\n" + ] + } + ], + "source": [ + "import webbrowser\n", + "\n", + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))\n", + "\n", + "# Create YouTube MCP server\n", + "youtube_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.YOUTUBE,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "# Create Gmail MCP server with OAuth authorization\n", + "gmail_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.GMAIL,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "print(\"\u2705 Created YouTube and Gmail MCP instances\")\n", + "\n", + "# Open Gmail OAuth authorization\n", + "webbrowser.open(gmail_mcp_instance.oauth_url)\n", + "print(f\"\ud83d\udd10 Opening OAuth authorization for Gmail, if you are not redirected, please open the following URL in your browser: {gmail_mcp_instance.oauth_url}\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 2 - using LlamaIndex to create Multi-Agent Workflow with the MCP Servers\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83e\udd16 Multi-agent workflow created with YouTube and Gmail agents!\n" + ] + } + ], + "source": [ + "from llama_index.llms.openai import OpenAI\n", + "from llama_index.core.agent.workflow import FunctionAgent, AgentWorkflow\n", + "from llama_index.tools.mcp import (\n", + " BasicMCPClient,\n", + " get_tools_from_mcp_url,\n", + " aget_tools_from_mcp_url,\n", + ")\n", + "\n", + "llm = OpenAI(model=\"gpt-4o-mini\", api_key=os.getenv(\"OPENAI_API_KEY\"))\n", + "\n", + "# Get MCP server URLs\n", + "youtube_mcp_server_url = youtube_mcp_instance.server_url\n", + "gmail_mcp_server_url = gmail_mcp_instance.server_url\n", + "\n", + "# Get tools from both MCP servers\n", + "youtube_tools = await aget_tools_from_mcp_url(https://github.com/youtube_mcp_server_url,%20client=BasicMCPClient(youtube_mcp_server_url))\n", + "gmail_tools = await aget_tools_from_mcp_url(https://github.com/gmail_mcp_server_url,%20client=BasicMCPClient(gmail_mcp_server_url))\n", + "\n", + "# Create specialized agents\n", + "youtube_agent = FunctionAgent(\n", + " name=\"youtube_agent\",\n", + " description=\"Agent that can summarize YouTube videos\",\n", + " tools=youtube_tools,\n", + " llm=llm,\n", + " system_prompt=\"You are a YouTube video summarization expert. Use MCP tools to analyze and summarize videos.\",\n", + " can_handoff_to=[\"gmail_agent\"],\n", + ")\n", + "\n", + "gmail_agent = FunctionAgent(\n", + " name=\"gmail_agent\", \n", + " description=\"Agent that can send emails via Gmail\",\n", + " tools=gmail_tools,\n", + " llm=llm,\n", + " system_prompt=\"You are an email assistant. Use MCP tools to send emails via Gmail.\"\n", + ")\n", + "\n", + "# Create multi-agent workflow\n", + "workflow = AgentWorkflow(\n", + " agents=[youtube_agent, gmail_agent],\n", + " root_agent=\"youtube_agent\",\n", + ")\n", + "\n", + "print(\"\ud83e\udd16 Multi-agent workflow created with YouTube and Gmail agents!\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Step 3 - run the workflow!" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\u2705 Report:\n", + " The summary of the video \"Introducing AgentWorkflow, a way to easily create multi-agent systems in Llamaindex\" has been successfully sent to zihaolin@klavis.ai. If you need anything else, feel free to ask!\n" + ] + } + ], + "source": [ + "YOUTUBE_VIDEO_URL = \"https://www.youtube.com/watch?v=MmiveeGxfX0&t=528s\" # pick a video you like!\n", + "EMAIL_RECIPIENT = \"zihaolin@klavis.ai\" # Replace with your email\n", + "\n", + "resp = await workflow.run(user_msg=f\"Summarize this video {YOUTUBE_VIDEO_URL} and send it to {EMAIL_RECIPIENT}\")\n", + "print(\"\\n\u2705 Report:\\n\", resp.response.content)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "\n", + "## Summary\n", + "\n", + "In this tutorial, we explored how to integrate LlamaIndex with Klavis AI to build powerful AI agents using MCP (Model Context Protocol) servers. Here's what we accomplished:\n", + "\n", + "### Key Takeaways:\n", + "\n", + "1. **Single Agent Setup**: Created a YouTube AI agent that can summarize videos using the Klavis YouTube MCP server\n", + "2. **Multi-Agent Workflow**: Built a sophisticated workflow combining YouTube and Gmail agents to summarize videos and automatically send summaries via email\n", + "3. **MCP Integration**: Learned how to use Klavis MCP servers with LlamaIndex's agent framework for enhanced functionality\n", + "\n", + "This integration opens up endless possibilities for building AI agents that can interact with various services and platforms through Klavis MCP servers. You can now create agents that work with YouTube, Gmail, GitHub, Slack, and many other services supported by Klavis.\n", + "\n", + "Happy building! \ud83d\ude80" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python (cookbook-venv)", + "language": "python", + "name": "cookbook-venv" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/examples/mastra-klavis/.env.example b/examples/mastra-klavis/.env.example new file mode 100644 index 00000000..26e65379 --- /dev/null +++ b/examples/mastra-klavis/.env.example @@ -0,0 +1,2 @@ +KLAVIS_API_KEY=YOUR_KLAVIS_API_KEY +OPENAI_API_KEY=YOUR_OPENAI_API_KEY diff --git a/examples/mastra-klavis/.gitignore b/examples/mastra-klavis/.gitignore new file mode 100644 index 00000000..44c9a120 --- /dev/null +++ b/examples/mastra-klavis/.gitignore @@ -0,0 +1,5 @@ +node_modules +dist +.mastra +.env + diff --git a/examples/mastra-klavis/README.md b/examples/mastra-klavis/README.md new file mode 100644 index 00000000..99359782 --- /dev/null +++ b/examples/mastra-klavis/README.md @@ -0,0 +1,47 @@ +# Mastra + Klavis Example + +This example demonstrates how to integrate Mastra with Klavis to create an AI agent that can interact with Gmail through MCP (Model Context Protocol). + +## What it does + +The example creates a Gmail MCP Agent that: +- Connects to a Klavis-hosted Gmail MCP server +- Provides tools to read, send, search emails, and manage labels +- Uses OpenAI's GPT-4o-mini model for natural language processing + +## Prerequisites + +1. A Klavis API key (set as `KLAVIS_API_KEY` environment variable) +2. Node.js installed on your system + +## Installation + +```bash +npm install +``` + +## Usage + +1. Copy the environment file and add your Klavis and OpenAI API key: +```bash +cp .env.example .env +``` + +2. Run the example: +```bash +npm run dev +``` + +3. The system will automatically open your browser for Gmail OAuth authorization + +## Key Components + +- **Agent Creation**: Creates a Gmail MCP agent using Mastra's framework +- **OAuth Flow**: Handles Gmail authentication through Klavis +- **Tool Integration**: Provides Gmail tools through the MCP protocol +- **AI Model**: Uses OpenAI's GPT-4o-mini for natural language understanding + +## Learn More + +- [Mastra Documentation](https://mastra.ai) +- [Klavis Documentation](https://klavis.ai) \ No newline at end of file diff --git a/examples/mastra-klavis/package-lock.json b/examples/mastra-klavis/package-lock.json new file mode 100644 index 00000000..62de8096 --- /dev/null +++ b/examples/mastra-klavis/package-lock.json @@ -0,0 +1,9697 @@ +{ + "name": "mastra-klavis-example", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "mastra-klavis-example", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@ai-sdk/openai": "^1.3.23", + "@mastra/core": "^0.21.1", + "@mastra/mcp": "^0.13.5", + "dotenv": "^17.2.0", + "klavis": "^2.10.2", + "open": "^10.1.0" + }, + "devDependencies": { + "@types/node": "^24.0.13", + "mastra": "^0.17.0", + "tsx": "^4.19.0", + "typescript": "^5.8.3" + } + }, + "node_modules/@a2a-js/sdk": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/@a2a-js/sdk/-/sdk-0.2.5.tgz", + "integrity": "sha512-VTDuRS5V0ATbJ/LkaQlisMnTAeYKXAK6scMguVBstf+KIBQ7HIuKhiXLv+G/hvejkV+THoXzoNifInAkU81P1g==", + "dependencies": { + "@types/cors": "^2.8.17", + "@types/express": "^4.17.23", + "body-parser": "^2.2.0", + "cors": "^2.8.5", + "express": "^4.21.2", + "uuid": "^11.1.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/anthropic-v5": { + "name": "@ai-sdk/anthropic", + "version": "2.0.23", + "resolved": "https://registry.npmjs.org/@ai-sdk/anthropic/-/anthropic-2.0.23.tgz", + "integrity": "sha512-ZEBiiv1UhjGjBwUU63pFhLK5LCSlNDb1idY9K1oZHm5/Fda1cuTojf32tOp0opH0RPbPAN/F8fyyNjbU33n9Kw==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.10" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/anthropic-v5/node_modules/@ai-sdk/provider": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/anthropic-v5/node_modules/@ai-sdk/provider-utils": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.10.tgz", + "integrity": "sha512-T1gZ76gEIwffep6MWI0QNy9jgoybUHE7TRaHB5k54K8mF91ciGFlbtCGxDYhMH3nCRergKwYFIDeFF0hJSIQHQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@standard-schema/spec": "^1.0.0", + "eventsource-parser": "^3.0.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/gateway": { + "version": "1.0.33", + "resolved": "https://registry.npmjs.org/@ai-sdk/gateway/-/gateway-1.0.33.tgz", + "integrity": "sha512-v9i3GPEo4t3fGcSkQkc07xM6KJN75VUv7C1Mqmmsu2xD8lQwnQfsrgAXyNuWe20yGY0eHuheSPDZhiqsGKtH1g==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.10", + "@vercel/oidc": "^3.0.1" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/gateway/node_modules/@ai-sdk/provider": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/gateway/node_modules/@ai-sdk/provider-utils": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.10.tgz", + "integrity": "sha512-T1gZ76gEIwffep6MWI0QNy9jgoybUHE7TRaHB5k54K8mF91ciGFlbtCGxDYhMH3nCRergKwYFIDeFF0hJSIQHQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@standard-schema/spec": "^1.0.0", + "eventsource-parser": "^3.0.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/google-v5": { + "name": "@ai-sdk/google", + "version": "2.0.17", + "resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-2.0.17.tgz", + "integrity": "sha512-6LyuUrCZuiULg0rUV+kT4T2jG19oUntudorI4ttv1ARkSbwl8A39ue3rA487aDDy6fUScdbGFiV5Yv/o4gidVA==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.10" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/google-v5/node_modules/@ai-sdk/provider": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/google-v5/node_modules/@ai-sdk/provider-utils": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.10.tgz", + "integrity": "sha512-T1gZ76gEIwffep6MWI0QNy9jgoybUHE7TRaHB5k54K8mF91ciGFlbtCGxDYhMH3nCRergKwYFIDeFF0hJSIQHQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@standard-schema/spec": "^1.0.0", + "eventsource-parser": "^3.0.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/openai": { + "version": "1.3.24", + "resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-1.3.24.tgz", + "integrity": "sha512-GYXnGJTHRTZc4gJMSmFRgEQudjqd4PUN0ZjQhPwOAYH1yOAvQoG/Ikqs+HyISRbLPCrhbZnPKCNHuRU4OfpW0Q==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "1.1.3", + "@ai-sdk/provider-utils": "2.2.8" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.0.0" + } + }, + "node_modules/@ai-sdk/openai-compatible": { + "version": "1.0.19", + "resolved": "https://registry.npmjs.org/@ai-sdk/openai-compatible/-/openai-compatible-1.0.19.tgz", + "integrity": "sha512-hnsqPCCSNKgpZRNDOAIXZs7OcUDM4ut5ggWxj2sjB4tNL/aBn/xrM7pJkqu+WuPowyrE60wPVSlw0LvtXAlMXQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.10" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/openai-compatible-v5": { + "name": "@ai-sdk/openai-compatible", + "version": "1.0.19", + "resolved": "https://registry.npmjs.org/@ai-sdk/openai-compatible/-/openai-compatible-1.0.19.tgz", + "integrity": "sha512-hnsqPCCSNKgpZRNDOAIXZs7OcUDM4ut5ggWxj2sjB4tNL/aBn/xrM7pJkqu+WuPowyrE60wPVSlw0LvtXAlMXQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.10" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/openai-compatible-v5/node_modules/@ai-sdk/provider": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/openai-compatible-v5/node_modules/@ai-sdk/provider-utils": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.10.tgz", + "integrity": "sha512-T1gZ76gEIwffep6MWI0QNy9jgoybUHE7TRaHB5k54K8mF91ciGFlbtCGxDYhMH3nCRergKwYFIDeFF0hJSIQHQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@standard-schema/spec": "^1.0.0", + "eventsource-parser": "^3.0.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/openai-compatible/node_modules/@ai-sdk/provider": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/openai-compatible/node_modules/@ai-sdk/provider-utils": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.10.tgz", + "integrity": "sha512-T1gZ76gEIwffep6MWI0QNy9jgoybUHE7TRaHB5k54K8mF91ciGFlbtCGxDYhMH3nCRergKwYFIDeFF0hJSIQHQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@standard-schema/spec": "^1.0.0", + "eventsource-parser": "^3.0.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/openai-v5": { + "name": "@ai-sdk/openai", + "version": "2.0.42", + "resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-2.0.42.tgz", + "integrity": "sha512-9mM6QS8k0ooH9qMC27nlrYLQmNDnO6Rk0JTmFo/yUxpABEWOcvQhMWNHbp9lFL6Ty5vkdINrujhsAQfWuEleOg==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.10" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/openai-v5/node_modules/@ai-sdk/provider": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/openai-v5/node_modules/@ai-sdk/provider-utils": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.10.tgz", + "integrity": "sha512-T1gZ76gEIwffep6MWI0QNy9jgoybUHE7TRaHB5k54K8mF91ciGFlbtCGxDYhMH3nCRergKwYFIDeFF0hJSIQHQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@standard-schema/spec": "^1.0.0", + "eventsource-parser": "^3.0.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/provider": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.1.3.tgz", + "integrity": "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/provider-utils": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.2.8.tgz", + "integrity": "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "1.1.3", + "nanoid": "^3.3.8", + "secure-json-parse": "^2.7.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.23.8" + } + }, + "node_modules/@ai-sdk/provider-utils-v5": { + "name": "@ai-sdk/provider-utils", + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.10.tgz", + "integrity": "sha512-T1gZ76gEIwffep6MWI0QNy9jgoybUHE7TRaHB5k54K8mF91ciGFlbtCGxDYhMH3nCRergKwYFIDeFF0hJSIQHQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@standard-schema/spec": "^1.0.0", + "eventsource-parser": "^3.0.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/provider-utils-v5/node_modules/@ai-sdk/provider": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/provider-v5": { + "name": "@ai-sdk/provider", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/react": { + "version": "1.2.12", + "resolved": "https://registry.npmjs.org/@ai-sdk/react/-/react-1.2.12.tgz", + "integrity": "sha512-jK1IZZ22evPZoQW3vlkZ7wvjYGYF+tRBKXtrcolduIkQ/m/sOAVcVeVDUDvh1T91xCnWCdUGCPZg2avZ90mv3g==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider-utils": "2.2.8", + "@ai-sdk/ui-utils": "1.2.11", + "swr": "^2.2.5", + "throttleit": "2.1.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "react": "^18 || ^19 || ^19.0.0-rc", + "zod": "^3.23.8" + }, + "peerDependenciesMeta": { + "zod": { + "optional": true + } + } + }, + "node_modules/@ai-sdk/ui-utils": { + "version": "1.2.11", + "resolved": "https://registry.npmjs.org/@ai-sdk/ui-utils/-/ui-utils-1.2.11.tgz", + "integrity": "sha512-3zcwCc8ezzFlwp3ZD15wAPjf2Au4s3vAbKsXQVyhxODHcmu0iyPO2Eua6D/vicq/AUm/BAo60r97O6HU+EI0+w==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "1.1.3", + "@ai-sdk/provider-utils": "2.2.8", + "zod-to-json-schema": "^3.24.1" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.23.8" + } + }, + "node_modules/@ai-sdk/xai-v5": { + "name": "@ai-sdk/xai", + "version": "2.0.23", + "resolved": "https://registry.npmjs.org/@ai-sdk/xai/-/xai-2.0.23.tgz", + "integrity": "sha512-Xo4r5W/Wvi4mkCD98DoafNxj9V3xysUlWOeqAYpqKeKkNQ2xtOTly2MHq+gP6wKud0Y/mI7hemkCMQgH6HOwzQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/openai-compatible": "1.0.19", + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.10" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/xai-v5/node_modules/@ai-sdk/provider": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/xai-v5/node_modules/@ai-sdk/provider-utils": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.10.tgz", + "integrity": "sha512-T1gZ76gEIwffep6MWI0QNy9jgoybUHE7TRaHB5k54K8mF91ciGFlbtCGxDYhMH3nCRergKwYFIDeFF0hJSIQHQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@standard-schema/spec": "^1.0.0", + "eventsource-parser": "^3.0.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@apidevtools/json-schema-ref-parser": { + "version": "14.2.1", + "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-14.2.1.tgz", + "integrity": "sha512-HmdFw9CDYqM6B25pqGBpNeLCKvGPlIx1EbLrVL0zPvj50CJQUHyBNBw45Muk0kEIkogo1VZvOKHajdMuAzSxRg==", + "license": "MIT", + "dependencies": { + "js-yaml": "^4.1.0" + }, + "engines": { + "node": ">= 20" + }, + "funding": { + "url": "https://github.com/sponsors/philsturgeon" + }, + "peerDependencies": { + "@types/json-schema": "^7.0.15" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.4.tgz", + "integrity": "sha512-YsmSKC29MJwf0gF8Rjjrg5LQCmyh+j/nD8/eP7f+BeoQTKYqs9RoWbjGOdy0+1Ekr68RJZMUOPVQaQisnIo4Rw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.4.tgz", + "integrity": "sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-module-transforms": "^7.28.3", + "@babel/helpers": "^7.28.4", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.4", + "@babel/types": "^7.28.4", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-annotate-as-pure": { + "version": "7.27.3", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", + "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", + "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.27.2", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-create-class-features-plugin": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.3.tgz", + "integrity": "sha512-V9f6ZFIYSLNEbuGA/92uOvYsGCJNsuA8ESZ4ldc09bWk/j8H8TKiPw8Mk1eG6olpnO0ALHJmYfZvF4MEE4gajg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-member-expression-to-functions": "^7.27.1", + "@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", + "@babel/traverse": "^7.28.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-member-expression-to-functions": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.27.1.tgz", + "integrity": "sha512-E5chM8eWjTp/aNoVpcbfM7mLxu9XGLWYise2eBKGQomAk/Mb4XoxyqXTZbuTohbsl8EKqdlMhnDI2CCLfcs9wA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", + "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", + "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.28.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-optimise-call-expression": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.27.1.tgz", + "integrity": "sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", + "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-replace-supers": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.27.1.tgz", + "integrity": "sha512-7EHz6qDZc8RYS5ElPoShMheWvEgERonFCs7IAonWLLUTXW59DP14bCZt89/GKyreYn8g3S83m21FelHKbeDCKA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-member-expression-to-functions": "^7.27.1", + "@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/traverse": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-skip-transparent-expression-wrappers": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.27.1.tgz", + "integrity": "sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", + "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", + "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", + "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-commonjs": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.27.1.tgz", + "integrity": "sha512-OJguuwlTYlN0gBZFRPqwOGNWssZjfIUdS7HMYtN8c1KmwpwHFBwTeFZrg9XZa+DFTitWOW5iTAG7tyCUPsCCyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-transforms": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-typescript": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.28.0.tgz", + "integrity": "sha512-4AEiDEBPIZvLQaWlc9liCavE0xRM0dNca41WtBeM3jgFptfUOSG9z0uteLhq6+3rq+WB6jIvUwKDTpXEHPJ2Vg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-create-class-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", + "@babel/plugin-syntax-typescript": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/preset-typescript": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.27.1.tgz", + "integrity": "sha512-l7WfQfX0WK4M0v2RudjuQK4u99BS6yLHYEmdtVPP7lKV013zr9DygFuWNlnbvQ9LR+LS0Egz/XAvGx5U9MX0fQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-validator-option": "^7.27.1", + "@babel/plugin-syntax-jsx": "^7.27.1", + "@babel/plugin-transform-modules-commonjs": "^7.27.1", + "@babel/plugin-transform-typescript": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@clack/core": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@clack/core/-/core-0.5.0.tgz", + "integrity": "sha512-p3y0FIOwaYRUPRcMO7+dlmLh8PSRcrjuTndsiA0WAFbWES0mLZlrjVoBRZ9DzkPFJZG6KGkJmoEAY0ZcVWTkow==", + "dev": true, + "license": "MIT", + "dependencies": { + "picocolors": "^1.0.0", + "sisteransi": "^1.0.5" + } + }, + "node_modules/@clack/prompts": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@clack/prompts/-/prompts-0.11.0.tgz", + "integrity": "sha512-pMN5FcrEw9hUkZA4f+zLlzivQSeQf5dRGJjSUbvVYDLvpKCdQx5OaknvKzgbtXOizhP+SJJJjqEbOe55uKKfAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@clack/core": "0.5.0", + "picocolors": "^1.0.0", + "sisteransi": "^1.0.5" + } + }, + "node_modules/@dmsnell/diff-match-patch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@dmsnell/diff-match-patch/-/diff-match-patch-1.1.0.tgz", + "integrity": "sha512-yejLPmM5pjsGvxS9gXablUSbInW7H976c/FJ4iQxWIm7/38xBySRemTPDe34lhg1gVLbJntX0+sH0jYfU+PN9A==", + "license": "Apache-2.0" + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.10.tgz", + "integrity": "sha512-0NFWnA+7l41irNuaSVlLfgNT12caWJVLzp5eAVhZ0z1qpxbockccEt3s+149rE64VUI3Ml2zt8Nv5JVc4QXTsw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.10.tgz", + "integrity": "sha512-dQAxF1dW1C3zpeCDc5KqIYuZ1tgAdRXNoZP7vkBIRtKZPYe2xVr/d3SkirklCHudW1B45tGiUlz2pUWDfbDD4w==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.10.tgz", + "integrity": "sha512-LSQa7eDahypv/VO6WKohZGPSJDq5OVOo3UoFR1E4t4Gj1W7zEQMUhI+lo81H+DtB+kP+tDgBp+M4oNCwp6kffg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.10.tgz", + "integrity": "sha512-MiC9CWdPrfhibcXwr39p9ha1x0lZJ9KaVfvzA0Wxwz9ETX4v5CHfF09bx935nHlhi+MxhA63dKRRQLiVgSUtEg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.10.tgz", + "integrity": "sha512-JC74bdXcQEpW9KkV326WpZZjLguSZ3DfS8wrrvPMHgQOIEIG/sPXEN/V8IssoJhbefLRcRqw6RQH2NnpdprtMA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.10.tgz", + "integrity": "sha512-tguWg1olF6DGqzws97pKZ8G2L7Ig1vjDmGTwcTuYHbuU6TTjJe5FXbgs5C1BBzHbJ2bo1m3WkQDbWO2PvamRcg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.10.tgz", + "integrity": "sha512-3ZioSQSg1HT2N05YxeJWYR+Libe3bREVSdWhEEgExWaDtyFbbXWb49QgPvFH8u03vUPX10JhJPcz7s9t9+boWg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.10.tgz", + "integrity": "sha512-LLgJfHJk014Aa4anGDbh8bmI5Lk+QidDmGzuC2D+vP7mv/GeSN+H39zOf7pN5N8p059FcOfs2bVlrRr4SK9WxA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.10.tgz", + "integrity": "sha512-oR31GtBTFYCqEBALI9r6WxoU/ZofZl962pouZRTEYECvNF/dtXKku8YXcJkhgK/beU+zedXfIzHijSRapJY3vg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.10.tgz", + "integrity": "sha512-5luJWN6YKBsawd5f9i4+c+geYiVEw20FVW5x0v1kEMWNq8UctFjDiMATBxLvmmHA4bf7F6hTRaJgtghFr9iziQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.10.tgz", + "integrity": "sha512-NrSCx2Kim3EnnWgS4Txn0QGt0Xipoumb6z6sUtl5bOEZIVKhzfyp/Lyw4C1DIYvzeW/5mWYPBFJU3a/8Yr75DQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.10.tgz", + "integrity": "sha512-xoSphrd4AZda8+rUDDfD9J6FUMjrkTz8itpTITM4/xgerAZZcFW7Dv+sun7333IfKxGG8gAq+3NbfEMJfiY+Eg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.10.tgz", + "integrity": "sha512-ab6eiuCwoMmYDyTnyptoKkVS3k8fy/1Uvq7Dj5czXI6DF2GqD2ToInBI0SHOp5/X1BdZ26RKc5+qjQNGRBelRA==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.10.tgz", + "integrity": "sha512-NLinzzOgZQsGpsTkEbdJTCanwA5/wozN9dSgEl12haXJBzMTpssebuXR42bthOF3z7zXFWH1AmvWunUCkBE4EA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.10.tgz", + "integrity": "sha512-FE557XdZDrtX8NMIeA8LBJX3dC2M8VGXwfrQWU7LB5SLOajfJIxmSdyL/gU1m64Zs9CBKvm4UAuBp5aJ8OgnrA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.10.tgz", + "integrity": "sha512-3BBSbgzuB9ajLoVZk0mGu+EHlBwkusRmeNYdqmznmMc9zGASFjSsxgkNsqmXugpPk00gJ0JNKh/97nxmjctdew==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.10.tgz", + "integrity": "sha512-QSX81KhFoZGwenVyPoberggdW1nrQZSvfVDAIUXr3WqLRZGZqWk/P4T8p2SP+de2Sr5HPcvjhcJzEiulKgnxtA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.10.tgz", + "integrity": "sha512-AKQM3gfYfSW8XRk8DdMCzaLUFB15dTrZfnX8WXQoOUpUBQ+NaAFCP1kPS/ykbbGYz7rxn0WS48/81l9hFl3u4A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.10.tgz", + "integrity": "sha512-7RTytDPGU6fek/hWuN9qQpeGPBZFfB4zZgcz2VK2Z5VpdUxEI8JKYsg3JfO0n/Z1E/6l05n0unDCNc4HnhQGig==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.10.tgz", + "integrity": "sha512-5Se0VM9Wtq797YFn+dLimf2Zx6McttsH2olUBsDml+lm0GOCRVebRWUvDtkY4BWYv/3NgzS8b/UM3jQNh5hYyw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.10.tgz", + "integrity": "sha512-XkA4frq1TLj4bEMB+2HnI0+4RnjbuGZfet2gs/LNs5Hc7D89ZQBHQ0gL2ND6Lzu1+QVkjp3x1gIcPKzRNP8bXw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.10.tgz", + "integrity": "sha512-AVTSBhTX8Y/Fz6OmIVBip9tJzZEUcY8WLh7I59+upa5/GPhh2/aM6bvOMQySspnCCHvFi79kMtdJS1w0DXAeag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.10.tgz", + "integrity": "sha512-fswk3XT0Uf2pGJmOpDB7yknqhVkJQkAQOcW/ccVOtfx05LkbWOaRAtn5SaqXypeKQra1QaEa841PgrSL9ubSPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.10.tgz", + "integrity": "sha512-ah+9b59KDTSfpaCg6VdJoOQvKjI33nTaQr4UluQwW7aEwZQsbMCfTmfEO4VyewOxx4RaDT/xCy9ra2GPWmO7Kw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.10.tgz", + "integrity": "sha512-QHPDbKkrGO8/cz9LKVnJU22HOi4pxZnZhhA2HYHez5Pz4JeffhDjf85E57Oyco163GnzNCVkZK0b/n4Y0UHcSw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.10.tgz", + "integrity": "sha512-9KpxSVFCu0iK1owoez6aC/s/EdUQLDN3adTxGCqxMVhrPDj6bt5dbrHDXUuq+Bs2vATFBBrQS5vdQ/Ed2P+nbw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@expo/devcert": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@expo/devcert/-/devcert-1.2.0.tgz", + "integrity": "sha512-Uilcv3xGELD5t/b0eM4cxBFEKQRIivB3v7i+VhWLV/gL98aw810unLKKJbGAxAIhY6Ipyz8ChWibFsKFXYwstA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@expo/sudo-prompt": "^9.3.1", + "debug": "^3.1.0", + "glob": "^10.4.2" + } + }, + "node_modules/@expo/devcert/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/@expo/sudo-prompt": { + "version": "9.3.2", + "resolved": "https://registry.npmjs.org/@expo/sudo-prompt/-/sudo-prompt-9.3.2.tgz", + "integrity": "sha512-HHQigo3rQWKMDzYDLkubN5WQOYXJJE2eNqIQC2axC2iO3mHdwnIR7FgZVvHWtBwAdzBgAP0ECp8KqS8TiMKvgw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@grpc/grpc-js": { + "version": "1.14.0", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.14.0.tgz", + "integrity": "sha512-N8Jx6PaYzcTRNzirReJCtADVoq4z7+1KQ4E70jTg/koQiMoUSN1kbNjPOqpPbhMFhfU1/l7ixspPl8dNY+FoUg==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/proto-loader": "^0.8.0", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "node_modules/@grpc/proto-loader": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.8.0.tgz", + "integrity": "sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ==", + "license": "Apache-2.0", + "dependencies": { + "lodash.camelcase": "^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.5.3", + "yargs": "^17.7.2" + }, + "bin": { + "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/ttlcache": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@isaacs/ttlcache/-/ttlcache-1.4.1.tgz", + "integrity": "sha512-RQgQ4uQ+pLbqXfOmieB91ejmLwvSgv9nLx6sT6sD83s7umBypgg+OIBOBbEUiJXrfpnp9j0mRhYYdzp9uqq3lA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@js-sdsl/ordered-map": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, + "node_modules/@jsdevtools/ono": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/@jsdevtools/ono/-/ono-7.1.3.tgz", + "integrity": "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==", + "license": "MIT" + }, + "node_modules/@mastra/core": { + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@mastra/core/-/core-0.21.1.tgz", + "integrity": "sha512-lyASrQCLiW+tleVRL16pBN5YfBg14IBEXYl2uGmrOsJ0QiPX/pHzcmYe0n7Oh48dYzu9YB/MgbQuY5PLzks8uQ==", + "license": "Apache-2.0", + "dependencies": { + "@a2a-js/sdk": "~0.2.4", + "@ai-sdk/anthropic-v5": "npm:@ai-sdk/anthropic@2.0.23", + "@ai-sdk/google-v5": "npm:@ai-sdk/google@2.0.17", + "@ai-sdk/openai-compatible-v5": "npm:@ai-sdk/openai-compatible@1.0.19", + "@ai-sdk/openai-v5": "npm:@ai-sdk/openai@2.0.42", + "@ai-sdk/provider": "^1.1.3", + "@ai-sdk/provider-utils": "^2.2.8", + "@ai-sdk/provider-utils-v5": "npm:@ai-sdk/provider-utils@3.0.10", + "@ai-sdk/provider-v5": "npm:@ai-sdk/provider@2.0.0", + "@ai-sdk/ui-utils": "^1.2.11", + "@ai-sdk/xai-v5": "npm:@ai-sdk/xai@2.0.23", + "@isaacs/ttlcache": "^1.4.1", + "@mastra/schema-compat": "0.11.4", + "@openrouter/ai-sdk-provider-v5": "npm:@openrouter/ai-sdk-provider@1.2.0", + "@opentelemetry/api": "^1.9.0", + "@opentelemetry/auto-instrumentations-node": "^0.62.1", + "@opentelemetry/core": "^2.0.1", + "@opentelemetry/exporter-trace-otlp-grpc": "^0.203.0", + "@opentelemetry/exporter-trace-otlp-http": "^0.203.0", + "@opentelemetry/otlp-exporter-base": "^0.203.0", + "@opentelemetry/otlp-transformer": "^0.203.0", + "@opentelemetry/resources": "^2.0.1", + "@opentelemetry/sdk-metrics": "^2.0.1", + "@opentelemetry/sdk-node": "^0.203.0", + "@opentelemetry/sdk-trace-base": "^2.0.1", + "@opentelemetry/sdk-trace-node": "^2.0.1", + "@opentelemetry/semantic-conventions": "^1.36.0", + "@sindresorhus/slugify": "^2.2.1", + "ai": "^4.3.19", + "ai-v5": "npm:ai@5.0.60", + "date-fns": "^3.6.0", + "dotenv": "^16.6.1", + "hono": "^4.9.7", + "hono-openapi": "^0.4.8", + "js-tiktoken": "^1.0.20", + "json-schema": "^0.4.0", + "json-schema-to-zod": "^2.6.1", + "p-map": "^7.0.3", + "p-retry": "^7.1.0", + "pino": "^9.7.0", + "pino-pretty": "^13.0.0", + "radash": "^12.1.1", + "sift": "^17.1.3", + "xstate": "^5.20.1", + "zod-to-json-schema": "^3.24.6" + }, + "engines": { + "node": ">=20" + }, + "peerDependencies": { + "zod": "^3.25.0 || ^4.0.0" + } + }, + "node_modules/@mastra/core/node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/@mastra/deployer": { + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@mastra/deployer/-/deployer-0.21.1.tgz", + "integrity": "sha512-bkwARFO9PRCvVzCsTYGOP/JpYem9QEYqSEjv8i1MdH/6u+GK4XqbCufO5NLM014aAN3h5OrTIcdImuy2rnih6w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@babel/core": "^7.28.4", + "@babel/helper-module-imports": "^7.27.1", + "@babel/preset-typescript": "^7.27.1", + "@mastra/server": "^0.21.1", + "@neon-rs/load": "^0.1.82", + "@optimize-lodash/rollup-plugin": "^5.0.2", + "@rollup/plugin-alias": "5.1.1", + "@rollup/plugin-commonjs": "28.0.6", + "@rollup/plugin-esm-shim": "0.1.8", + "@rollup/plugin-json": "6.1.0", + "@rollup/plugin-node-resolve": "16.0.2", + "@rollup/plugin-virtual": "3.0.2", + "@sindresorhus/slugify": "^2.2.1", + "builtins": "^5.1.0", + "detect-libc": "^2.0.4", + "dotenv": "^16.6.1", + "empathic": "^2.0.0", + "esbuild": "^0.25.10", + "find-workspaces": "^0.3.1", + "fs-extra": "^11.3.2", + "hono": "^4.9.7", + "local-pkg": "^1.1.2", + "resolve-from": "^5.0.0", + "resolve.exports": "^2.0.3", + "rollup": "~4.50.2", + "rollup-plugin-esbuild": "^6.2.1", + "rollup-plugin-node-externals": "^8.0.1", + "tinyglobby": "^0.2.15", + "typescript-paths": "^1.5.1" + }, + "peerDependencies": { + "@mastra/core": ">=0.21.0-0 <0.22.0-0", + "zod": "^3.25.0 || ^4.0.0" + } + }, + "node_modules/@mastra/deployer/node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/@mastra/loggers": { + "version": "0.10.16", + "resolved": "https://registry.npmjs.org/@mastra/loggers/-/loggers-0.10.16.tgz", + "integrity": "sha512-zZiaz/kfChdA9L8ks0F73pn/kh6gU8FkS8VMigKH5uc7Ez8gvWsaC9Orgg5k7Og4bkXrZ3j2fN5RewtjI8appA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "pino": "^9.7.0", + "pino-pretty": "^13.0.0" + }, + "peerDependencies": { + "@mastra/core": ">=0.18.1-0 <0.22.0-0" + } + }, + "node_modules/@mastra/mcp": { + "version": "0.13.5", + "resolved": "https://registry.npmjs.org/@mastra/mcp/-/mcp-0.13.5.tgz", + "integrity": "sha512-GFHOHTbUY8PuEwzObIVCaHF855M4S4xIOnNJE1WJeEsPvWOllyL+y1Wz3HGjusrAfZIDrZ8j8Z5Diqxf3KTW/A==", + "license": "Apache-2.0", + "dependencies": { + "@apidevtools/json-schema-ref-parser": "^14.2.1", + "@modelcontextprotocol/sdk": "^1.17.5", + "date-fns": "^4.1.0", + "exit-hook": "^4.0.0", + "fast-deep-equal": "^3.1.3", + "uuid": "^11.1.0", + "zod-from-json-schema": "^0.5.0", + "zod-from-json-schema-v3": "npm:zod-from-json-schema@^0.0.5" + }, + "peerDependencies": { + "@mastra/core": ">=0.20.1-0 <0.22.0-0", + "zod": "^3.25.0 || ^4.0.0" + } + }, + "node_modules/@mastra/mcp/node_modules/date-fns": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-4.1.0.tgz", + "integrity": "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/kossnocorp" + } + }, + "node_modules/@mastra/schema-compat": { + "version": "0.11.4", + "resolved": "https://registry.npmjs.org/@mastra/schema-compat/-/schema-compat-0.11.4.tgz", + "integrity": "sha512-oh3+enP3oYftZlmJAKQQj5VXR86KgTMwfMnwALZyLk04dPSWfVD2wGytoDg5Qbi3rX9qHj6g0rMNa0CUjR6aTg==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0", + "zod-from-json-schema": "^0.5.0", + "zod-from-json-schema-v3": "npm:zod-from-json-schema@^0.0.5", + "zod-to-json-schema": "^3.24.6" + }, + "peerDependencies": { + "ai": "^4.0.0 || ^5.0.0", + "zod": "^3.25.0 || ^4.0.0" + } + }, + "node_modules/@mastra/server": { + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@mastra/server/-/server-0.21.1.tgz", + "integrity": "sha512-qf14MT+lGiPAgv7oqOC2+bcOa5jED1Ucmlm66y7TZKywUr1n8qDNePoGj+RTU1Nz/oLI0NloNYu3kCUO2GW/sA==", + "dev": true, + "license": "Apache-2.0", + "peerDependencies": { + "@mastra/core": ">=0.21.0-0 <0.22.0-0", + "zod": "^3.25.0 || ^4.0.0" + } + }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.20.1.tgz", + "integrity": "sha512-j/P+yuxXfgxb+mW7OEoRCM3G47zCTDqUPivJo/VzpjbG8I9csTXtOprCf5FfOfHK4whOJny0aHuBEON+kS7CCA==", + "license": "MIT", + "dependencies": { + "ajv": "^6.12.6", + "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "eventsource-parser": "^3.0.0", + "express": "^5.0.1", + "express-rate-limit": "^7.5.0", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.23.8", + "zod-to-json-schema": "^3.24.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/express": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "license": "MIT", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.0", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/finalhandler": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/serve-static": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@neon-rs/load": { + "version": "0.1.82", + "resolved": "https://registry.npmjs.org/@neon-rs/load/-/load-0.1.82.tgz", + "integrity": "sha512-H4Gu2o5kPp+JOEhRrOQCnJnf7X6sv9FBLttM/wSbb4efsgFWeHzfU/ItZ01E5qqEk+U6QGdeVO7lxXIAtYHr5A==", + "dev": true, + "license": "MIT" + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@openrouter/ai-sdk-provider-v5": { + "name": "@openrouter/ai-sdk-provider", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@openrouter/ai-sdk-provider/-/ai-sdk-provider-1.2.0.tgz", + "integrity": "sha512-stuIwq7Yb7DNmk3GuCtz+oS3nZOY4TXEV3V5KsknDGQN7Fpu3KRMQVWRc1J073xKdf0FC9EHOctSyzsACmp5Ag==", + "license": "Apache-2.0", + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "ai": "^5.0.0", + "zod": "^3.24.1 || ^v4" + } + }, + "node_modules/@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "license": "Apache-2.0", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/@opentelemetry/api-logs": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api-logs/-/api-logs-0.203.0.tgz", + "integrity": "sha512-9B9RU0H7Ya1Dx/Rkyc4stuBZSGVQF27WigitInx2QQoj6KUpEFYPKoWjdFTunJYxmXmh17HeBvbMa1EhGyPmqQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api": "^1.3.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/@opentelemetry/auto-instrumentations-node": { + "version": "0.62.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/auto-instrumentations-node/-/auto-instrumentations-node-0.62.2.tgz", + "integrity": "sha512-Ipe6X7ddrCiRsuewyTU83IvKiSFT4piqmv9z8Ovg1E7v98pdTj1pUE6sDrHV50zl7/ypd+cONBgt+EYSZu4u9Q==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/instrumentation-amqplib": "^0.50.0", + "@opentelemetry/instrumentation-aws-lambda": "^0.54.1", + "@opentelemetry/instrumentation-aws-sdk": "^0.58.0", + "@opentelemetry/instrumentation-bunyan": "^0.49.0", + "@opentelemetry/instrumentation-cassandra-driver": "^0.49.0", + "@opentelemetry/instrumentation-connect": "^0.47.0", + "@opentelemetry/instrumentation-cucumber": "^0.19.0", + "@opentelemetry/instrumentation-dataloader": "^0.21.1", + "@opentelemetry/instrumentation-dns": "^0.47.0", + "@opentelemetry/instrumentation-express": "^0.52.0", + "@opentelemetry/instrumentation-fastify": "^0.48.0", + "@opentelemetry/instrumentation-fs": "^0.23.0", + "@opentelemetry/instrumentation-generic-pool": "^0.47.0", + "@opentelemetry/instrumentation-graphql": "^0.51.0", + "@opentelemetry/instrumentation-grpc": "^0.203.0", + "@opentelemetry/instrumentation-hapi": "^0.50.0", + "@opentelemetry/instrumentation-http": "^0.203.0", + "@opentelemetry/instrumentation-ioredis": "^0.51.0", + "@opentelemetry/instrumentation-kafkajs": "^0.13.0", + "@opentelemetry/instrumentation-knex": "^0.48.0", + "@opentelemetry/instrumentation-koa": "^0.51.0", + "@opentelemetry/instrumentation-lru-memoizer": "^0.48.0", + "@opentelemetry/instrumentation-memcached": "^0.47.0", + "@opentelemetry/instrumentation-mongodb": "^0.56.0", + "@opentelemetry/instrumentation-mongoose": "^0.50.0", + "@opentelemetry/instrumentation-mysql": "^0.49.0", + "@opentelemetry/instrumentation-mysql2": "^0.50.0", + "@opentelemetry/instrumentation-nestjs-core": "^0.49.0", + "@opentelemetry/instrumentation-net": "^0.47.0", + "@opentelemetry/instrumentation-oracledb": "^0.29.0", + "@opentelemetry/instrumentation-pg": "^0.56.1", + "@opentelemetry/instrumentation-pino": "^0.50.1", + "@opentelemetry/instrumentation-redis": "^0.52.0", + "@opentelemetry/instrumentation-restify": "^0.49.0", + "@opentelemetry/instrumentation-router": "^0.48.0", + "@opentelemetry/instrumentation-runtime-node": "^0.17.1", + "@opentelemetry/instrumentation-socket.io": "^0.50.0", + "@opentelemetry/instrumentation-tedious": "^0.22.0", + "@opentelemetry/instrumentation-undici": "^0.14.0", + "@opentelemetry/instrumentation-winston": "^0.48.1", + "@opentelemetry/resource-detector-alibaba-cloud": "^0.31.3", + "@opentelemetry/resource-detector-aws": "^2.3.0", + "@opentelemetry/resource-detector-azure": "^0.10.0", + "@opentelemetry/resource-detector-container": "^0.7.3", + "@opentelemetry/resource-detector-gcp": "^0.37.0", + "@opentelemetry/resources": "^2.0.0", + "@opentelemetry/sdk-node": "^0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.4.1", + "@opentelemetry/core": "^2.0.0" + } + }, + "node_modules/@opentelemetry/context-async-hooks": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-2.1.0.tgz", + "integrity": "sha512-zOyetmZppnwTyPrt4S7jMfXiSX9yyfF0hxlA8B5oo2TtKl+/RGCy7fi4DrBfIf3lCPrkKsRBWZZD7RFojK7FDg==", + "license": "Apache-2.0", + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/core": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.1.0.tgz", + "integrity": "sha512-RMEtHsxJs/GiHHxYT58IY57UXAQTuUnZVco6ymDEqTNlJKTimM4qPUPVe8InNFyBjhHBEAx4k3Q8LtNayBsbUQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-grpc": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-logs-otlp-grpc/-/exporter-logs-otlp-grpc-0.203.0.tgz", + "integrity": "sha512-g/2Y2noc/l96zmM+g0LdeuyYKINyBwN6FJySoU15LHPLcMN/1a0wNk2SegwKcxrRdE7Xsm7fkIR5n6XFe3QpPw==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/grpc-js": "^1.7.1", + "@opentelemetry/core": "2.0.1", + "@opentelemetry/otlp-exporter-base": "0.203.0", + "@opentelemetry/otlp-grpc-exporter-base": "0.203.0", + "@opentelemetry/otlp-transformer": "0.203.0", + "@opentelemetry/sdk-logs": "0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-grpc/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-http": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-logs-otlp-http/-/exporter-logs-otlp-http-0.203.0.tgz", + "integrity": "sha512-s0hys1ljqlMTbXx2XiplmMJg9wG570Z5lH7wMvrZX6lcODI56sG4HL03jklF63tBeyNwK2RV1/ntXGo3HgG4Qw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.203.0", + "@opentelemetry/core": "2.0.1", + "@opentelemetry/otlp-exporter-base": "0.203.0", + "@opentelemetry/otlp-transformer": "0.203.0", + "@opentelemetry/sdk-logs": "0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-http/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-proto": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-logs-otlp-proto/-/exporter-logs-otlp-proto-0.203.0.tgz", + "integrity": "sha512-nl/7S91MXn5R1aIzoWtMKGvqxgJgepB/sH9qW0rZvZtabnsjbf8OQ1uSx3yogtvLr0GzwD596nQKz2fV7q2RBw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.203.0", + "@opentelemetry/core": "2.0.1", + "@opentelemetry/otlp-exporter-base": "0.203.0", + "@opentelemetry/otlp-transformer": "0.203.0", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/sdk-logs": "0.203.0", + "@opentelemetry/sdk-trace-base": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-proto/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-proto/node_modules/@opentelemetry/resources": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.0.1.tgz", + "integrity": "sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-logs-otlp-proto/node_modules/@opentelemetry/sdk-trace-base": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.0.1.tgz", + "integrity": "sha512-xYLlvk/xdScGx1aEqvxLwf6sXQLXCjk3/1SQT9X9AoN5rXRhkdvIFShuNNmtTEPRBqcsMbS4p/gJLNI2wXaDuQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-metrics-otlp-grpc": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-metrics-otlp-grpc/-/exporter-metrics-otlp-grpc-0.203.0.tgz", + "integrity": "sha512-FCCj9nVZpumPQSEI57jRAA89hQQgONuoC35Lt+rayWY/mzCAc6BQT7RFyFaZKJ2B7IQ8kYjOCPsF/HGFWjdQkQ==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/grpc-js": "^1.7.1", + "@opentelemetry/core": "2.0.1", + "@opentelemetry/exporter-metrics-otlp-http": "0.203.0", + "@opentelemetry/otlp-exporter-base": "0.203.0", + "@opentelemetry/otlp-grpc-exporter-base": "0.203.0", + "@opentelemetry/otlp-transformer": "0.203.0", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/sdk-metrics": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-metrics-otlp-grpc/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-metrics-otlp-grpc/node_modules/@opentelemetry/resources": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.0.1.tgz", + "integrity": "sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-metrics-otlp-grpc/node_modules/@opentelemetry/sdk-metrics": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-2.0.1.tgz", + "integrity": "sha512-wf8OaJoSnujMAHWR3g+/hGvNcsC16rf9s1So4JlMiFaFHiE4HpIA3oUh+uWZQ7CNuK8gVW/pQSkgoa5HkkOl0g==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.9.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-metrics-otlp-http": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-metrics-otlp-http/-/exporter-metrics-otlp-http-0.203.0.tgz", + "integrity": "sha512-HFSW10y8lY6BTZecGNpV3GpoSy7eaO0Z6GATwZasnT4bEsILp8UJXNG5OmEsz4SdwCSYvyCbTJdNbZP3/8LGCQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/otlp-exporter-base": "0.203.0", + "@opentelemetry/otlp-transformer": "0.203.0", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/sdk-metrics": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-metrics-otlp-http/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-metrics-otlp-http/node_modules/@opentelemetry/resources": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.0.1.tgz", + "integrity": "sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-metrics-otlp-http/node_modules/@opentelemetry/sdk-metrics": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-2.0.1.tgz", + "integrity": "sha512-wf8OaJoSnujMAHWR3g+/hGvNcsC16rf9s1So4JlMiFaFHiE4HpIA3oUh+uWZQ7CNuK8gVW/pQSkgoa5HkkOl0g==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.9.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-metrics-otlp-proto": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-metrics-otlp-proto/-/exporter-metrics-otlp-proto-0.203.0.tgz", + "integrity": "sha512-OZnhyd9npU7QbyuHXFEPVm3LnjZYifuKpT3kTnF84mXeEQ84pJJZgyLBpU4FSkSwUkt/zbMyNAI7y5+jYTWGIg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/exporter-metrics-otlp-http": "0.203.0", + "@opentelemetry/otlp-exporter-base": "0.203.0", + "@opentelemetry/otlp-transformer": "0.203.0", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/sdk-metrics": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-metrics-otlp-proto/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-metrics-otlp-proto/node_modules/@opentelemetry/resources": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.0.1.tgz", + "integrity": "sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-metrics-otlp-proto/node_modules/@opentelemetry/sdk-metrics": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-2.0.1.tgz", + "integrity": "sha512-wf8OaJoSnujMAHWR3g+/hGvNcsC16rf9s1So4JlMiFaFHiE4HpIA3oUh+uWZQ7CNuK8gVW/pQSkgoa5HkkOl0g==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.9.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-prometheus": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-prometheus/-/exporter-prometheus-0.203.0.tgz", + "integrity": "sha512-2jLuNuw5m4sUj/SncDf/mFPabUxMZmmYetx5RKIMIQyPnl6G6ooFzfeE8aXNRf8YD1ZXNlCnRPcISxjveGJHNg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/sdk-metrics": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-prometheus/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-prometheus/node_modules/@opentelemetry/resources": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.0.1.tgz", + "integrity": "sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-prometheus/node_modules/@opentelemetry/sdk-metrics": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-2.0.1.tgz", + "integrity": "sha512-wf8OaJoSnujMAHWR3g+/hGvNcsC16rf9s1So4JlMiFaFHiE4HpIA3oUh+uWZQ7CNuK8gVW/pQSkgoa5HkkOl0g==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.9.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-trace-otlp-grpc/-/exporter-trace-otlp-grpc-0.203.0.tgz", + "integrity": "sha512-322coOTf81bm6cAA8+ML6A+m4r2xTCdmAZzGNTboPXRzhwPt4JEmovsFAs+grpdarObd68msOJ9FfH3jxM6wqA==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/grpc-js": "^1.7.1", + "@opentelemetry/core": "2.0.1", + "@opentelemetry/otlp-exporter-base": "0.203.0", + "@opentelemetry/otlp-grpc-exporter-base": "0.203.0", + "@opentelemetry/otlp-transformer": "0.203.0", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/sdk-trace-base": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc/node_modules/@opentelemetry/resources": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.0.1.tgz", + "integrity": "sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc/node_modules/@opentelemetry/sdk-trace-base": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.0.1.tgz", + "integrity": "sha512-xYLlvk/xdScGx1aEqvxLwf6sXQLXCjk3/1SQT9X9AoN5rXRhkdvIFShuNNmtTEPRBqcsMbS4p/gJLNI2wXaDuQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-trace-otlp-http/-/exporter-trace-otlp-http-0.203.0.tgz", + "integrity": "sha512-ZDiaswNYo0yq/cy1bBLJFe691izEJ6IgNmkjm4C6kE9ub/OMQqDXORx2D2j8fzTBTxONyzusbaZlqtfmyqURPw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/otlp-exporter-base": "0.203.0", + "@opentelemetry/otlp-transformer": "0.203.0", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/sdk-trace-base": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http/node_modules/@opentelemetry/resources": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.0.1.tgz", + "integrity": "sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http/node_modules/@opentelemetry/sdk-trace-base": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.0.1.tgz", + "integrity": "sha512-xYLlvk/xdScGx1aEqvxLwf6sXQLXCjk3/1SQT9X9AoN5rXRhkdvIFShuNNmtTEPRBqcsMbS4p/gJLNI2wXaDuQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-trace-otlp-proto/-/exporter-trace-otlp-proto-0.203.0.tgz", + "integrity": "sha512-1xwNTJ86L0aJmWRwENCJlH4LULMG2sOXWIVw+Szta4fkqKVY50Eo4HoVKKq6U9QEytrWCr8+zjw0q/ZOeXpcAQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/otlp-exporter-base": "0.203.0", + "@opentelemetry/otlp-transformer": "0.203.0", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/sdk-trace-base": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto/node_modules/@opentelemetry/resources": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.0.1.tgz", + "integrity": "sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto/node_modules/@opentelemetry/sdk-trace-base": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.0.1.tgz", + "integrity": "sha512-xYLlvk/xdScGx1aEqvxLwf6sXQLXCjk3/1SQT9X9AoN5rXRhkdvIFShuNNmtTEPRBqcsMbS4p/gJLNI2wXaDuQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-zipkin": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-zipkin/-/exporter-zipkin-2.0.1.tgz", + "integrity": "sha512-a9eeyHIipfdxzCfc2XPrE+/TI3wmrZUDFtG2RRXHSbZZULAny7SyybSvaDvS77a7iib5MPiAvluwVvbGTsHxsw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/sdk-trace-base": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/exporter-zipkin/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-zipkin/node_modules/@opentelemetry/resources": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.0.1.tgz", + "integrity": "sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-zipkin/node_modules/@opentelemetry/sdk-trace-base": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.0.1.tgz", + "integrity": "sha512-xYLlvk/xdScGx1aEqvxLwf6sXQLXCjk3/1SQT9X9AoN5rXRhkdvIFShuNNmtTEPRBqcsMbS4p/gJLNI2wXaDuQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/instrumentation": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.203.0.tgz", + "integrity": "sha512-ke1qyM+3AK2zPuBPb6Hk/GCsc5ewbLvPNkEuELx/JmANeEp6ZjnZ+wypPAJSucTw0wvCGrUaibDSdcrGFoWxKQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.203.0", + "import-in-the-middle": "^1.8.1", + "require-in-the-middle": "^7.1.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-amqplib": { + "version": "0.50.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-amqplib/-/instrumentation-amqplib-0.50.0.tgz", + "integrity": "sha512-kwNs/itehHG/qaQBcVrLNcvXVPW0I4FCOVtw3LHMLdYIqD7GJ6Yv2nX+a4YHjzbzIeRYj8iyMp0Bl7tlkidq5w==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-aws-lambda": { + "version": "0.54.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-aws-lambda/-/instrumentation-aws-lambda-0.54.1.tgz", + "integrity": "sha512-qm8pGSAM1mXk7unbrGktWWGJc6IFI58ZsaHJ+i420Fp5VO3Vf7GglIgaXTS8CKBrVB4LHFj3NvzJg31PtsAQcA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0", + "@types/aws-lambda": "8.10.152" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-aws-sdk": { + "version": "0.58.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-aws-sdk/-/instrumentation-aws-sdk-0.58.0.tgz", + "integrity": "sha512-9vFH7gU686dsAeLMCkqUj9y0MQZ1xrTtStSpNV2UaGWtDnRjJrAdJLu9Y545oKEaDTeVaob4UflyZvvpZnw3Xw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.34.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-bunyan": { + "version": "0.49.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-bunyan/-/instrumentation-bunyan-0.49.0.tgz", + "integrity": "sha512-ky5Am1y6s3Ex/3RygHxB/ZXNG07zPfg9Z6Ora+vfeKcr/+I6CJbWXWhSBJor3gFgKN3RvC11UWVURnmDpBS6Pg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "^0.203.0", + "@opentelemetry/instrumentation": "^0.203.0", + "@types/bunyan": "1.8.11" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-cassandra-driver": { + "version": "0.49.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-cassandra-driver/-/instrumentation-cassandra-driver-0.49.0.tgz", + "integrity": "sha512-BNIvqldmLkeikfI5w5Rlm9vG5NnQexfPoxOgEMzfDVOEF+vS6351I6DzWLLgWWR9CNF/jQJJi/lr6am2DLp0Rw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-connect": { + "version": "0.47.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-connect/-/instrumentation-connect-0.47.0.tgz", + "integrity": "sha512-pjenvjR6+PMRb6/4X85L4OtkQCootgb/Jzh/l/Utu3SJHBid1F+gk9sTGU2FWuhhEfV6P7MZ7BmCdHXQjgJ42g==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0", + "@types/connect": "3.4.38" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-cucumber": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-cucumber/-/instrumentation-cucumber-0.19.0.tgz", + "integrity": "sha512-99ms8kQWRuPt5lkDqbJJzD+7Tq5TMUlBZki4SA2h6CgK4ncX+tyep9XFY1e+XTBLJIWmuFMGbWqBLJ4fSKIQNQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/instrumentation-dataloader": { + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-dataloader/-/instrumentation-dataloader-0.21.1.tgz", + "integrity": "sha512-hNAm/bwGawLM8VDjKR0ZUDJ/D/qKR3s6lA5NV+btNaPVm2acqhPcT47l2uCVi+70lng2mywfQncor9v8/ykuyw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-dns": { + "version": "0.47.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-dns/-/instrumentation-dns-0.47.0.tgz", + "integrity": "sha512-775fOnewWkTF4iXMGKgwvOGqEmPrU1PZpXjjqvTrEErYBJe7Fz1WlEeUStHepyKOdld7Ghv7TOF/kE3QDctvrg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-express": { + "version": "0.52.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-express/-/instrumentation-express-0.52.0.tgz", + "integrity": "sha512-W7pizN0Wh1/cbNhhTf7C62NpyYw7VfCFTYg0DYieSTrtPBT1vmoSZei19wfKLnrMsz3sHayCg0HxCVL2c+cz5w==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-fastify": { + "version": "0.48.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-fastify/-/instrumentation-fastify-0.48.0.tgz", + "integrity": "sha512-3zQlE/DoVfVH6/ycuTv7vtR/xib6WOa0aLFfslYcvE62z0htRu/ot8PV/zmMZfnzpTQj8S/4ULv36R6UIbpJIg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-fs": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-fs/-/instrumentation-fs-0.23.0.tgz", + "integrity": "sha512-Puan+QopWHA/KNYvDfOZN6M/JtF6buXEyD934vrb8WhsX1/FuM7OtoMlQyIqAadnE8FqqDL4KDPiEfCQH6pQcQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-generic-pool": { + "version": "0.47.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-generic-pool/-/instrumentation-generic-pool-0.47.0.tgz", + "integrity": "sha512-UfHqf3zYK+CwDwEtTjaD12uUqGGTswZ7ofLBEdQ4sEJp9GHSSJMQ2hT3pgBxyKADzUdoxQAv/7NqvL42ZI+Qbw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-graphql": { + "version": "0.51.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-graphql/-/instrumentation-graphql-0.51.0.tgz", + "integrity": "sha512-LchkOu9X5DrXAnPI1+Z06h/EH/zC7D6sA86hhPrk3evLlsJTz0grPrkL/yUJM9Ty0CL/y2HSvmWQCjbJEz/ADg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-grpc": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-grpc/-/instrumentation-grpc-0.203.0.tgz", + "integrity": "sha512-Qmjx2iwccHYRLoE4RFS46CvQE9JG9Pfeae4EPaNZjvIuJxb/pZa2R9VWzRlTehqQWpAvto/dGhtkw8Tv+o0LTg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "0.203.0", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-hapi": { + "version": "0.50.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-hapi/-/instrumentation-hapi-0.50.0.tgz", + "integrity": "sha512-5xGusXOFQXKacrZmDbpHQzqYD1gIkrMWuwvlrEPkYOsjUqGUjl1HbxCsn5Y9bUXOCgP1Lj6A4PcKt1UiJ2MujA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-http": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-http/-/instrumentation-http-0.203.0.tgz", + "integrity": "sha512-y3uQAcCOAwnO6vEuNVocmpVzG3PER6/YZqbPbbffDdJ9te5NkHEkfSMNzlC3+v7KlE+WinPGc3N7MR30G1HY2g==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/instrumentation": "0.203.0", + "@opentelemetry/semantic-conventions": "^1.29.0", + "forwarded-parse": "2.1.2" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-http/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/instrumentation-ioredis": { + "version": "0.51.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-ioredis/-/instrumentation-ioredis-0.51.0.tgz", + "integrity": "sha512-9IUws0XWCb80NovS+17eONXsw1ZJbHwYYMXiwsfR9TSurkLV5UNbRSKb9URHO+K+pIJILy9wCxvyiOneMr91Ig==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/redis-common": "^0.38.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-kafkajs": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-kafkajs/-/instrumentation-kafkajs-0.13.0.tgz", + "integrity": "sha512-FPQyJsREOaGH64hcxlzTsIEQC4DYANgTwHjiB7z9lldmvua1LRMVn3/FfBlzXoqF179B0VGYviz6rn75E9wsDw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.30.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-knex": { + "version": "0.48.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-knex/-/instrumentation-knex-0.48.0.tgz", + "integrity": "sha512-V5wuaBPv/lwGxuHjC6Na2JFRjtPgstw19jTFl1B1b6zvaX8zVDYUDaR5hL7glnQtUSCMktPttQsgK4dhXpddcA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.33.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-koa": { + "version": "0.51.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-koa/-/instrumentation-koa-0.51.0.tgz", + "integrity": "sha512-XNLWeMTMG1/EkQBbgPYzCeBD0cwOrfnn8ao4hWgLv0fNCFQu1kCsJYygz2cvKuCs340RlnG4i321hX7R8gj3Rg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-lru-memoizer": { + "version": "0.48.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-lru-memoizer/-/instrumentation-lru-memoizer-0.48.0.tgz", + "integrity": "sha512-KUW29wfMlTPX1wFz+NNrmE7IzN7NWZDrmFWHM/VJcmFEuQGnnBuTIdsP55CnBDxKgQ/qqYFp4udQFNtjeFosPw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-memcached": { + "version": "0.47.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-memcached/-/instrumentation-memcached-0.47.0.tgz", + "integrity": "sha512-vXDs/l4hlWy1IepPG1S6aYiIZn+tZDI24kAzwKKJmR2QEJRL84PojmALAEJGazIOLl/VdcCPZdMb0U2K0VzojA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0", + "@types/memcached": "^2.2.6" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-mongodb": { + "version": "0.56.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongodb/-/instrumentation-mongodb-0.56.0.tgz", + "integrity": "sha512-YG5IXUUmxX3Md2buVMvxm9NWlKADrnavI36hbJsihqqvBGsWnIfguf0rUP5Srr0pfPqhQjUP+agLMsvu0GmUpA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-mongoose": { + "version": "0.50.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongoose/-/instrumentation-mongoose-0.50.0.tgz", + "integrity": "sha512-Am8pk1Ct951r4qCiqkBcGmPIgGhoDiFcRtqPSLbJrUZqEPUsigjtMjoWDRLG1Ki1NHgOF7D0H7d+suWz1AAizw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-mysql": { + "version": "0.49.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql/-/instrumentation-mysql-0.49.0.tgz", + "integrity": "sha512-QU9IUNqNsrlfE3dJkZnFHqLjlndiU39ll/YAAEvWE40sGOCi9AtOF6rmEGzJ1IswoZ3oyePV7q2MP8SrhJfVAA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0", + "@types/mysql": "2.15.27" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-mysql2": { + "version": "0.50.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql2/-/instrumentation-mysql2-0.50.0.tgz", + "integrity": "sha512-PoOMpmq73rOIE3nlTNLf3B1SyNYGsp7QXHYKmeTZZnJ2Ou7/fdURuOhWOI0e6QZ5gSem18IR1sJi6GOULBQJ9g==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0", + "@opentelemetry/sql-common": "^0.41.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-nestjs-core": { + "version": "0.49.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-nestjs-core/-/instrumentation-nestjs-core-0.49.0.tgz", + "integrity": "sha512-1R/JFwdmZIk3T/cPOCkVvFQeKYzbbUvDxVH3ShXamUwBlGkdEu5QJitlRMyVNZaHkKZKWgYrBarGQsqcboYgaw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.30.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-net": { + "version": "0.47.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-net/-/instrumentation-net-0.47.0.tgz", + "integrity": "sha512-csoJ++Njpf7C09JH+0HNGenuNbDZBqO1rFhMRo6s0rAmJwNh9zY3M/urzptmKlqbKnf4eH0s+CKHy/+M8fbFsQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-oracledb": { + "version": "0.29.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-oracledb/-/instrumentation-oracledb-0.29.0.tgz", + "integrity": "sha512-2aHLiJdkyiUbooIUm7FaZf+O4jyqEl+RfFpgud1dxT87QeeYM216wi+xaMNzsb5yKtRBqbA3qeHBCyenYrOZwA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0", + "@types/oracledb": "6.5.2" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-pg": { + "version": "0.56.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-pg/-/instrumentation-pg-0.56.1.tgz", + "integrity": "sha512-0/PiHDPVaLdcXNw6Gqb3JBdMxComMEwh444X8glwiynJKJHRTR49+l2cqJfoOVzB8Sl1XRl3Yaqw6aDi3s8e9w==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.34.0", + "@opentelemetry/sql-common": "^0.41.0", + "@types/pg": "8.15.5", + "@types/pg-pool": "2.0.6" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-pino": { + "version": "0.50.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-pino/-/instrumentation-pino-0.50.1.tgz", + "integrity": "sha512-pBbvuWiHA9iAumAuQ0SKYOXK7NRlbnVTf/qBV0nMdRnxBPrc/GZTbh0f7Y59gZfYsbCLhXLL1oRTEnS+PwS3CA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "^0.203.0", + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-redis": { + "version": "0.52.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-redis/-/instrumentation-redis-0.52.0.tgz", + "integrity": "sha512-R8Y7cCZlJ2Vl31S2i7bl5SqyC/aul54ski4wCFip/Tp9WGtLK1xVATi2rwy2wkc8ZCtjdEe9eEVR+QFG6gGZxg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/redis-common": "^0.38.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-restify": { + "version": "0.49.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-restify/-/instrumentation-restify-0.49.0.tgz", + "integrity": "sha512-tsGZZhS4mVZH7omYxw5jpsrD3LhWizqWc0PYtAnzpFUvL5ZINHE+cm57bssTQ2AK/GtZMxu9LktwCvIIf3dSmw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-router": { + "version": "0.48.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-router/-/instrumentation-router-0.48.0.tgz", + "integrity": "sha512-Wixrc8CchuJojXpaS/dCQjFOMc+3OEil1H21G+WLYQb8PcKt5kzW9zDBT19nyjjQOx/D/uHPfgbrT+Dc7cfJ9w==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-runtime-node": { + "version": "0.17.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-runtime-node/-/instrumentation-runtime-node-0.17.1.tgz", + "integrity": "sha512-c1FlAk+bB2uF9a8YneGmNPTl7c/xVaan4mmWvbkWcOmH/ipKqR1LaKUlz/BMzLrJLjho1EJlG2NrS2w2Arg+nw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-socket.io": { + "version": "0.50.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-socket.io/-/instrumentation-socket.io-0.50.0.tgz", + "integrity": "sha512-6JN6lnKN9ZuZtZdMQIR+no1qHzQvXSZUsNe3sSWMgqmNRyEXuDUWBIyKKeG0oHRHtR4xE4QhJyD4D5kKRPWZFA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-tedious": { + "version": "0.22.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-tedious/-/instrumentation-tedious-0.22.0.tgz", + "integrity": "sha512-XrrNSUCyEjH1ax9t+Uo6lv0S2FCCykcF7hSxBMxKf7Xn0bPRxD3KyFUZy25aQXzbbbUHhtdxj3r2h88SfEM3aA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/semantic-conventions": "^1.27.0", + "@types/tedious": "^4.0.14" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-undici": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-undici/-/instrumentation-undici-0.14.0.tgz", + "integrity": "sha512-2HN+7ztxAReXuxzrtA3WboAKlfP5OsPA57KQn2AdYZbJ3zeRPcLXyW4uO/jpLE6PLm0QRtmeGCmfYpqRlwgSwg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.7.0" + } + }, + "node_modules/@opentelemetry/instrumentation-winston": { + "version": "0.48.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-winston/-/instrumentation-winston-0.48.1.tgz", + "integrity": "sha512-XyOuVwdziirHHYlsw+BWrvdI/ymjwnexupKA787zQQ+D5upaE/tseZxjfQa7+t4+FdVLxHICaMTmkSD4yZHpzQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "^0.203.0", + "@opentelemetry/instrumentation": "^0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/otlp-exporter-base": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-exporter-base/-/otlp-exporter-base-0.203.0.tgz", + "integrity": "sha512-Wbxf7k+87KyvxFr5D7uOiSq/vHXWommvdnNE7vECO3tAhsA2GfOlpWINCMWUEPdHZ7tCXxw6Epp3vgx3jU7llQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/otlp-transformer": "0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/otlp-exporter-base/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-grpc-exporter-base": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-grpc-exporter-base/-/otlp-grpc-exporter-base-0.203.0.tgz", + "integrity": "sha512-te0Ze1ueJF+N/UOFl5jElJW4U0pZXQ8QklgSfJ2linHN0JJsuaHG8IabEUi2iqxY8ZBDlSiz1Trfv5JcjWWWwQ==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/grpc-js": "^1.7.1", + "@opentelemetry/core": "2.0.1", + "@opentelemetry/otlp-exporter-base": "0.203.0", + "@opentelemetry/otlp-transformer": "0.203.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/otlp-grpc-exporter-base/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-transformer/-/otlp-transformer-0.203.0.tgz", + "integrity": "sha512-Y8I6GgoCna0qDQ2W6GCRtaF24SnvqvA8OfeTi7fqigD23u8Jpb4R5KFv/pRvrlGagcCLICMIyh9wiejp4TXu/A==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.203.0", + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/sdk-logs": "0.203.0", + "@opentelemetry/sdk-metrics": "2.0.1", + "@opentelemetry/sdk-trace-base": "2.0.1", + "protobufjs": "^7.3.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer/node_modules/@opentelemetry/resources": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.0.1.tgz", + "integrity": "sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer/node_modules/@opentelemetry/sdk-metrics": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-2.0.1.tgz", + "integrity": "sha512-wf8OaJoSnujMAHWR3g+/hGvNcsC16rf9s1So4JlMiFaFHiE4HpIA3oUh+uWZQ7CNuK8gVW/pQSkgoa5HkkOl0g==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.9.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer/node_modules/@opentelemetry/sdk-trace-base": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.0.1.tgz", + "integrity": "sha512-xYLlvk/xdScGx1aEqvxLwf6sXQLXCjk3/1SQT9X9AoN5rXRhkdvIFShuNNmtTEPRBqcsMbS4p/gJLNI2wXaDuQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/propagator-b3": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/propagator-b3/-/propagator-b3-2.0.1.tgz", + "integrity": "sha512-Hc09CaQ8Tf5AGLmf449H726uRoBNGPBL4bjr7AnnUpzWMvhdn61F78z9qb6IqB737TffBsokGAK1XykFEZ1igw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/propagator-b3/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/propagator-jaeger": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/propagator-jaeger/-/propagator-jaeger-2.0.1.tgz", + "integrity": "sha512-7PMdPBmGVH2eQNb/AtSJizQNgeNTfh6jQFqys6lfhd6P4r+m/nTh3gKPPpaCXVdRQ+z93vfKk+4UGty390283w==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/propagator-jaeger/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/redis-common": { + "version": "0.38.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/redis-common/-/redis-common-0.38.2.tgz", + "integrity": "sha512-1BCcU93iwSRZvDAgwUxC/DV4T/406SkMfxGqu5ojc3AvNI+I9GhV7v0J1HljsczuuhcnFLYqD5VmwVXfCGHzxA==", + "license": "Apache-2.0", + "engines": { + "node": "^18.19.0 || >=20.6.0" + } + }, + "node_modules/@opentelemetry/resource-detector-alibaba-cloud": { + "version": "0.31.9", + "resolved": "https://registry.npmjs.org/@opentelemetry/resource-detector-alibaba-cloud/-/resource-detector-alibaba-cloud-0.31.9.tgz", + "integrity": "sha512-V+HbpICyzmJoQHYpiN0xRlj7QqeR9pPo+JZiZztV77L2MdlUCa/Cq7h0gdFNIKc0P9u9rYYYW21oaqdhhC5LZg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/resources": "^2.0.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/resource-detector-aws": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resource-detector-aws/-/resource-detector-aws-2.6.0.tgz", + "integrity": "sha512-atZ9/HNXh9ZJuMZUH2TPl89imFZBaoiU0Mksa70ysVhYRzhk3hfJyiu+eETjZ7NhGjBPrd3sfVYEq/St/7+o3g==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/resources": "^2.0.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/resource-detector-azure": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resource-detector-azure/-/resource-detector-azure-0.10.0.tgz", + "integrity": "sha512-5cNAiyPBg53Uxe/CW7hsCq8HiKNAUGH+gi65TtgpzSR9bhJG4AEbuZhbJDFwe97tn2ifAD1JTkbc/OFuaaFWbA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/resources": "^2.0.0", + "@opentelemetry/semantic-conventions": "^1.27.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/resource-detector-container": { + "version": "0.7.9", + "resolved": "https://registry.npmjs.org/@opentelemetry/resource-detector-container/-/resource-detector-container-0.7.9.tgz", + "integrity": "sha512-BiS14kCylLzh/mayN/sjnOdhnpfgiekaEsIzaL29MErfQR0mFCZjAE2uu8jMjShva9bSDFs65ouuAFft+vBthg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/resources": "^2.0.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/resource-detector-gcp": { + "version": "0.37.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resource-detector-gcp/-/resource-detector-gcp-0.37.0.tgz", + "integrity": "sha512-LGpJBECIMsVKhiulb4nxUw++m1oF4EiDDPmFGW2aqYaAF0oUvJNv8Z/55CAzcZ7SxvlTgUwzewXDBsuCup7iqw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/resources": "^2.0.0", + "@opentelemetry/semantic-conventions": "^1.27.0", + "gcp-metadata": "^6.0.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/resources": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.1.0.tgz", + "integrity": "sha512-1CJjf3LCvoefUOgegxi8h6r4B/wLSzInyhGP2UmIBYNlo4Qk5CZ73e1eEyWmfXvFtm1ybkmfb2DqWvspsYLrWw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.1.0", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-logs": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-logs/-/sdk-logs-0.203.0.tgz", + "integrity": "sha512-vM2+rPq0Vi3nYA5akQD2f3QwossDnTDLvKbea6u/A2NZ3XDkPxMfo/PNrDoXhDUD/0pPo2CdH5ce/thn9K0kLw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.203.0", + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.4.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-logs/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-logs/node_modules/@opentelemetry/resources": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.0.1.tgz", + "integrity": "sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-metrics": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-2.1.0.tgz", + "integrity": "sha512-J9QX459mzqHLL9Y6FZ4wQPRZG4TOpMCyPOh6mkr/humxE1W2S3Bvf4i75yiMW9uyed2Kf5rxmLhTm/UK8vNkAw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.1.0", + "@opentelemetry/resources": "2.1.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.9.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node": { + "version": "0.203.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-node/-/sdk-node-0.203.0.tgz", + "integrity": "sha512-zRMvrZGhGVMvAbbjiNQW3eKzW/073dlrSiAKPVWmkoQzah9wfynpVPeL55f9fVIm0GaBxTLcPeukWGy0/Wj7KQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.203.0", + "@opentelemetry/core": "2.0.1", + "@opentelemetry/exporter-logs-otlp-grpc": "0.203.0", + "@opentelemetry/exporter-logs-otlp-http": "0.203.0", + "@opentelemetry/exporter-logs-otlp-proto": "0.203.0", + "@opentelemetry/exporter-metrics-otlp-grpc": "0.203.0", + "@opentelemetry/exporter-metrics-otlp-http": "0.203.0", + "@opentelemetry/exporter-metrics-otlp-proto": "0.203.0", + "@opentelemetry/exporter-prometheus": "0.203.0", + "@opentelemetry/exporter-trace-otlp-grpc": "0.203.0", + "@opentelemetry/exporter-trace-otlp-http": "0.203.0", + "@opentelemetry/exporter-trace-otlp-proto": "0.203.0", + "@opentelemetry/exporter-zipkin": "2.0.1", + "@opentelemetry/instrumentation": "0.203.0", + "@opentelemetry/propagator-b3": "2.0.1", + "@opentelemetry/propagator-jaeger": "2.0.1", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/sdk-logs": "0.203.0", + "@opentelemetry/sdk-metrics": "2.0.1", + "@opentelemetry/sdk-trace-base": "2.0.1", + "@opentelemetry/sdk-trace-node": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/context-async-hooks": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-2.0.1.tgz", + "integrity": "sha512-XuY23lSI3d4PEqKA+7SLtAgwqIfc6E/E9eAQWLN1vlpC53ybO3o6jW4BsXo1xvz9lYyyWItfQDDLzezER01mCw==", + "license": "Apache-2.0", + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/core": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.0.1.tgz", + "integrity": "sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/resources": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.0.1.tgz", + "integrity": "sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/sdk-metrics": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-2.0.1.tgz", + "integrity": "sha512-wf8OaJoSnujMAHWR3g+/hGvNcsC16rf9s1So4JlMiFaFHiE4HpIA3oUh+uWZQ7CNuK8gVW/pQSkgoa5HkkOl0g==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.9.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/sdk-trace-base": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.0.1.tgz", + "integrity": "sha512-xYLlvk/xdScGx1aEqvxLwf6sXQLXCjk3/1SQT9X9AoN5rXRhkdvIFShuNNmtTEPRBqcsMbS4p/gJLNI2wXaDuQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.0.1", + "@opentelemetry/resources": "2.0.1", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/sdk-trace-node": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-node/-/sdk-trace-node-2.0.1.tgz", + "integrity": "sha512-UhdbPF19pMpBtCWYP5lHbTogLWx9N0EBxtdagvkn5YtsAnCBZzL7SjktG+ZmupRgifsHMjwUaCCaVmqGfSADmA==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/context-async-hooks": "2.0.1", + "@opentelemetry/core": "2.0.1", + "@opentelemetry/sdk-trace-base": "2.0.1" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-base": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.1.0.tgz", + "integrity": "sha512-uTX9FBlVQm4S2gVQO1sb5qyBLq/FPjbp+tmGoxu4tIgtYGmBYB44+KX/725RFDe30yBSaA9Ml9fqphe1hbUyLQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "2.1.0", + "@opentelemetry/resources": "2.1.0", + "@opentelemetry/semantic-conventions": "^1.29.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-node": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-node/-/sdk-trace-node-2.1.0.tgz", + "integrity": "sha512-SvVlBFc/jI96u/mmlKm86n9BbTCbQ35nsPoOohqJX6DXH92K0kTe73zGY5r8xoI1QkjR9PizszVJLzMC966y9Q==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/context-async-hooks": "2.1.0", + "@opentelemetry/core": "2.1.0", + "@opentelemetry/sdk-trace-base": "2.1.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/semantic-conventions": { + "version": "1.37.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.37.0.tgz", + "integrity": "sha512-JD6DerIKdJGmRp4jQyX5FlrQjA4tjOw1cvfsPAZXfOOEErMUHjPcPSICS+6WnM0nB0efSFARh0KAZss+bvExOA==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/sql-common": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/sql-common/-/sql-common-0.41.2.tgz", + "integrity": "sha512-4mhWm3Z8z+i508zQJ7r6Xi7y4mmoJpdvH0fZPFRkWrdp5fq7hhZ2HhYokEOLkfqSMgPR4Z9EyB3DBkbKGOqZiQ==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/core": "^2.0.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.1.0" + } + }, + "node_modules/@optimize-lodash/rollup-plugin": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@optimize-lodash/rollup-plugin/-/rollup-plugin-5.0.2.tgz", + "integrity": "sha512-UWBD9/C5jO0rDAbiqrZqiTLPD0LOHG3DzBo8ubLTpNWY9xOz5f5+S2yuxG/7ICk8sx8K6pZ8O/jsAbFgjtfh6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@optimize-lodash/transform": "3.0.6", + "@rollup/pluginutils": "^5.1.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "rollup": ">= 4.x" + } + }, + "node_modules/@optimize-lodash/transform": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@optimize-lodash/transform/-/transform-3.0.6.tgz", + "integrity": "sha512-9+qMSaDpahC0+vX2ChM46/ls6a5Ankqs6RTLrHSaFpm7o1mFanP82e+jm9/0o5D660ueK8dWJGPCXQrBxBNNWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "estree-walker": "^2.0.2", + "magic-string": "~0.30.11" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==", + "license": "BSD-3-Clause" + }, + "node_modules/@rollup/plugin-alias": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@rollup/plugin-alias/-/plugin-alias-5.1.1.tgz", + "integrity": "sha512-PR9zDb+rOzkRb2VD+EuKB7UC41vU5DIwZ5qqCpk0KJudcWAyi8rvYOhS7+L5aZCspw1stTViLgN5v6FF1p5cgQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-commonjs": { + "version": "28.0.6", + "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-28.0.6.tgz", + "integrity": "sha512-XSQB1K7FUU5QP+3lOQmVCE3I0FcbbNvmNT4VJSj93iUjayaARrTQeoRdiYQoftAJBLrR9t2agwAd3ekaTgHNlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "commondir": "^1.0.1", + "estree-walker": "^2.0.2", + "fdir": "^6.2.0", + "is-reference": "1.2.1", + "magic-string": "^0.30.3", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=16.0.0 || 14 >= 14.17" + }, + "peerDependencies": { + "rollup": "^2.68.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-esm-shim": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/@rollup/plugin-esm-shim/-/plugin-esm-shim-0.1.8.tgz", + "integrity": "sha512-xEU0b/BShgDDSPjidhJd4R74J9xZ9jLVtFWNGtsUXyEsdwwwB1a3XOAwwGaNIyUHD6EhxPO21JMfUmJWoMn7SA==", + "dev": true, + "license": "MIT", + "dependencies": { + "magic-string": "^0.30.3", + "mlly": "^1.7.4" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-json": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@rollup/plugin-json/-/plugin-json-6.1.0.tgz", + "integrity": "sha512-EGI2te5ENk1coGeADSIwZ7G2Q8CJS2sF120T7jLw4xFw9n7wIOXHo+kIYRAoVpJAN+kmqZSoO3Fp4JtoNF4ReA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/pluginutils": "^5.1.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-node-resolve": { + "version": "16.0.2", + "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-16.0.2.tgz", + "integrity": "sha512-tCtHJ2BlhSoK4cCs25NMXfV7EALKr0jyasmqVCq3y9cBrKdmJhtsy1iTz36Xhk/O+pDJbzawxF4K6ZblqCnITQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "@types/resolve": "1.20.2", + "deepmerge": "^4.2.2", + "is-module": "^1.0.0", + "resolve": "^1.22.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^2.78.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-virtual": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@rollup/plugin-virtual/-/plugin-virtual-3.0.2.tgz", + "integrity": "sha512-10monEYsBp3scM4/ND4LNH5Rxvh3e/cVeL3jWTgZ2SrQ+BmUoQcopVQvnaMcOnykb1VkxUFuDAN+0FnpTFRy2A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/pluginutils": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz", + "integrity": "sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^2.0.2", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.50.2.tgz", + "integrity": "sha512-uLN8NAiFVIRKX9ZQha8wy6UUs06UNSZ32xj6giK/rmMXAgKahwExvK6SsmgU5/brh4w/nSgj8e0k3c1HBQpa0A==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.50.2.tgz", + "integrity": "sha512-oEouqQk2/zxxj22PNcGSskya+3kV0ZKH+nQxuCCOGJ4oTXBdNTbv+f/E3c74cNLeMO1S5wVWacSws10TTSB77g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.50.2.tgz", + "integrity": "sha512-OZuTVTpj3CDSIxmPgGH8en/XtirV5nfljHZ3wrNwvgkT5DQLhIKAeuFSiwtbMto6oVexV0k1F1zqURPKf5rI1Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.50.2.tgz", + "integrity": "sha512-Wa/Wn8RFkIkr1vy1k1PB//VYhLnlnn5eaJkfTQKivirOvzu5uVd2It01ukeQstMursuz7S1bU+8WW+1UPXpa8A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.50.2.tgz", + "integrity": "sha512-QkzxvH3kYN9J1w7D1A+yIMdI1pPekD+pWx7G5rXgnIlQ1TVYVC6hLl7SOV9pi5q9uIDF9AuIGkuzcbF7+fAhow==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.50.2.tgz", + "integrity": "sha512-dkYXB0c2XAS3a3jmyDkX4Jk0m7gWLFzq1C3qUnJJ38AyxIF5G/dyS4N9B30nvFseCfgtCEdbYFhk0ChoCGxPog==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.50.2.tgz", + "integrity": "sha512-9VlPY/BN3AgbukfVHAB8zNFWB/lKEuvzRo1NKev0Po8sYFKx0i+AQlCYftgEjcL43F2h9Ui1ZSdVBc4En/sP2w==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.50.2.tgz", + "integrity": "sha512-+GdKWOvsifaYNlIVf07QYan1J5F141+vGm5/Y8b9uCZnG/nxoGqgCmR24mv0koIWWuqvFYnbURRqw1lv7IBINw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.50.2.tgz", + "integrity": "sha512-df0Eou14ojtUdLQdPFnymEQteENwSJAdLf5KCDrmZNsy1c3YaCNaJvYsEUHnrg+/DLBH612/R0xd3dD03uz2dg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.50.2.tgz", + "integrity": "sha512-iPeouV0UIDtz8j1YFR4OJ/zf7evjauqv7jQ/EFs0ClIyL+by++hiaDAfFipjOgyz6y6xbDvJuiU4HwpVMpRFDQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.50.2.tgz", + "integrity": "sha512-OL6KaNvBopLlj5fTa5D5bau4W82f+1TyTZRr2BdnfsrnQnmdxh4okMxR2DcDkJuh4KeoQZVuvHvzuD/lyLn2Kw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.50.2.tgz", + "integrity": "sha512-I21VJl1w6z/K5OTRl6aS9DDsqezEZ/yKpbqlvfHbW0CEF5IL8ATBMuUx6/mp683rKTK8thjs/0BaNrZLXetLag==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.50.2.tgz", + "integrity": "sha512-Hq6aQJT/qFFHrYMjS20nV+9SKrXL2lvFBENZoKfoTH2kKDOJqff5OSJr4x72ZaG/uUn+XmBnGhfr4lwMRrmqCQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.50.2.tgz", + "integrity": "sha512-82rBSEXRv5qtKyr0xZ/YMF531oj2AIpLZkeNYxmKNN6I2sVE9PGegN99tYDLK2fYHJITL1P2Lgb4ZXnv0PjQvw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.50.2.tgz", + "integrity": "sha512-4Q3S3Hy7pC6uaRo9gtXUTJ+EKo9AKs3BXKc2jYypEcMQ49gDPFU2P1ariX9SEtBzE5egIX6fSUmbmGazwBVF9w==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.50.2.tgz", + "integrity": "sha512-9Jie/At6qk70dNIcopcL4p+1UirusEtznpNtcq/u/C5cC4HBX7qSGsYIcG6bdxj15EYWhHiu02YvmdPzylIZlA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.50.2.tgz", + "integrity": "sha512-HPNJwxPL3EmhzeAnsWQCM3DcoqOz3/IC6de9rWfGR8ZCuEHETi9km66bH/wG3YH0V3nyzyFEGUZeL5PKyy4xvw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.50.2.tgz", + "integrity": "sha512-nMKvq6FRHSzYfKLHZ+cChowlEkR2lj/V0jYj9JnGUVPL2/mIeFGmVM2mLaFeNa5Jev7W7TovXqXIG2d39y1KYA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.50.2.tgz", + "integrity": "sha512-eFUvvnTYEKeTyHEijQKz81bLrUQOXKZqECeiWH6tb8eXXbZk+CXSG2aFrig2BQ/pjiVRj36zysjgILkqarS2YA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.50.2.tgz", + "integrity": "sha512-cBaWmXqyfRhH8zmUxK3d3sAhEWLrtMjWBRwdMMHJIXSjvjLKvv49adxiEz+FJ8AP90apSDDBx2Tyd/WylV6ikA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.50.2.tgz", + "integrity": "sha512-APwKy6YUhvZaEoHyM+9xqmTpviEI+9eL7LoCH+aLcvWYHJ663qG5zx7WzWZY+a9qkg5JtzcMyJ9z0WtQBMDmgA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@sec-ant/readable-stream": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@sec-ant/readable-stream/-/readable-stream-0.4.1.tgz", + "integrity": "sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sindresorhus/merge-streams": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-4.0.0.tgz", + "integrity": "sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@sindresorhus/slugify": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@sindresorhus/slugify/-/slugify-2.2.1.tgz", + "integrity": "sha512-MkngSCRZ8JdSOCHRaYd+D01XhvU3Hjy6MGl06zhOk614hp9EOAp5gIkBeQg7wtmxpitU6eAL4kdiRMcJa2dlrw==", + "license": "MIT", + "dependencies": { + "@sindresorhus/transliterate": "^1.0.0", + "escape-string-regexp": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@sindresorhus/transliterate": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/transliterate/-/transliterate-1.6.0.tgz", + "integrity": "sha512-doH1gimEu3A46VX6aVxpHTeHrytJAG6HgdxntYnCFiIFHEM/ZGpG8KiZGBChchjQmG0XFIBL552kBTjVcMZXwQ==", + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@standard-schema/spec": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", + "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", + "license": "MIT" + }, + "node_modules/@types/aws-lambda": { + "version": "8.10.152", + "resolved": "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.152.tgz", + "integrity": "sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw==", + "license": "MIT" + }, + "node_modules/@types/body-parser": { + "version": "1.19.6", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz", + "integrity": "sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==", + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/bunyan": { + "version": "1.8.11", + "resolved": "https://registry.npmjs.org/@types/bunyan/-/bunyan-1.8.11.tgz", + "integrity": "sha512-758fRH7umIMk5qt5ELmRMff4mLDlN+xyYzC+dkPTdKwbSkJFvz6xwyScrytPU0QIBbRRwbiE8/BIg8bpajerNQ==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/cors": { + "version": "2.8.19", + "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.19.tgz", + "integrity": "sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/express": { + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.23.tgz", + "integrity": "sha512-Crp6WY9aTYP3qPi2wGDo9iUe/rceX01UMhnF1jmwDcKCFM6cx7YhGP/Mpr3y9AASpfHixIG0E6azCcL5OcDHsQ==", + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.19.7", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.7.tgz", + "integrity": "sha512-FvPtiIf1LfhzsaIXhv/PHan/2FeQBbtBDtfX2QfvPxdUelMDEckK08SM6nqo1MIZY3RUlfA+HV8+hFUSio78qg==", + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.5.tgz", + "integrity": "sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==", + "license": "MIT" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "license": "MIT" + }, + "node_modules/@types/memcached": { + "version": "2.2.10", + "resolved": "https://registry.npmjs.org/@types/memcached/-/memcached-2.2.10.tgz", + "integrity": "sha512-AM9smvZN55Gzs2wRrqeMHVP7KE8KWgCJO/XL5yCly2xF6EKa4YlbpK+cLSAH4NG/Ah64HrlegmGqW8kYws7Vxg==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "license": "MIT" + }, + "node_modules/@types/mysql": { + "version": "2.15.27", + "resolved": "https://registry.npmjs.org/@types/mysql/-/mysql-2.15.27.tgz", + "integrity": "sha512-YfWiV16IY0OeBfBCk8+hXKmdTKrKlwKN1MNKAPBu5JYxLwBEZl7QzeEpGnlZb3VMGJrrGmB84gXiH+ofs/TezA==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/node": { + "version": "24.7.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.7.0.tgz", + "integrity": "sha512-IbKooQVqUBrlzWTi79E8Fw78l8k1RNtlDDNWsFZs7XonuQSJ8oNYfEeclhprUldXISRMLzBpILuKgPlIxm+/Yw==", + "license": "MIT", + "dependencies": { + "undici-types": "~7.14.0" + } + }, + "node_modules/@types/oracledb": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/@types/oracledb/-/oracledb-6.5.2.tgz", + "integrity": "sha512-kK1eBS/Adeyis+3OlBDMeQQuasIDLUYXsi2T15ccNJ0iyUpQ4xDF7svFu3+bGVrI0CMBUclPciz+lsQR3JX3TQ==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/pg": { + "version": "8.15.5", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.15.5.tgz", + "integrity": "sha512-LF7lF6zWEKxuT3/OR8wAZGzkg4ENGXFNyiV/JeOt9z5B+0ZVwbql9McqX5c/WStFq1GaGso7H1AzP/qSzmlCKQ==", + "license": "MIT", + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, + "node_modules/@types/pg-pool": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/pg-pool/-/pg-pool-2.0.6.tgz", + "integrity": "sha512-TaAUE5rq2VQYxab5Ts7WZhKNmuN78Q6PiFonTDdpbx8a1H0M1vhy3rhiMjl+e2iHmogyMw7jZF4FrE6eJUy5HQ==", + "license": "MIT", + "dependencies": { + "@types/pg": "*" + } + }, + "node_modules/@types/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==", + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "license": "MIT" + }, + "node_modules/@types/resolve": { + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz", + "integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@types/send/-/send-1.2.0.tgz", + "integrity": "sha512-zBF6vZJn1IaMpg3xUF25VK3gd3l8zwE0ZLRX7dsQyQi+jp4E8mMDJNGDYnYse+bQhYwWERTxVwHpi3dMOq7RKQ==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.9", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.9.tgz", + "integrity": "sha512-dOTIuqpWLyl3BBXU3maNQsS4A3zuuoYRNIvYSxxhebPfXg2mzWQEPne/nlJ37yOse6uGgR386uTpdsx4D0QZWA==", + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "<1" + } + }, + "node_modules/@types/serve-static/node_modules/@types/send": { + "version": "0.17.5", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.5.tgz", + "integrity": "sha512-z6F2D3cOStZvuk2SaP6YrwkNO65iTZcwA2ZkSABegdkAh/lf+Aa/YQndZVfmEXT5vgAp6zv06VQ3ejSVjAny4w==", + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/tedious": { + "version": "4.0.14", + "resolved": "https://registry.npmjs.org/@types/tedious/-/tedious-4.0.14.tgz", + "integrity": "sha512-KHPsfX/FoVbUGbyYvk1q9MMQHLPeRZhRJZdO45Q4YjvFkv4hMNghCWTvy7rdKessBsmtz4euWCWAB6/tVpI1Iw==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@vercel/oidc": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@vercel/oidc/-/oidc-3.0.2.tgz", + "integrity": "sha512-JekxQ0RApo4gS4un/iMGsIL1/k4KUBe3HmnGcDvzHuFBdQdudEJgTqcsJC7y6Ul4Yw5CeykgvQbX2XeEJd0+DA==", + "license": "Apache-2.0", + "engines": { + "node": ">= 20" + } + }, + "node_modules/@webcontainer/env": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@webcontainer/env/-/env-1.1.1.tgz", + "integrity": "sha512-6aN99yL695Hi9SuIk1oC88l9o0gmxL1nGWWQ/kNy81HigJ0FoaoTXpytCj6ItzgyCEwA9kF1wixsTuv5cjsgng==", + "dev": true, + "license": "MIT" + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "license": "MIT", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "license": "MIT", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-import-attributes": { + "version": "1.9.5", + "resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", + "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==", + "license": "MIT", + "peerDependencies": { + "acorn": "^8" + } + }, + "node_modules/agent-base": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", + "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/ai": { + "version": "4.3.19", + "resolved": "https://registry.npmjs.org/ai/-/ai-4.3.19.tgz", + "integrity": "sha512-dIE2bfNpqHN3r6IINp9znguYdhIOheKW2LDigAMrgt/upT3B8eBGPSCblENvaZGoq+hxaN9fSMzjWpbqloP+7Q==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "1.1.3", + "@ai-sdk/provider-utils": "2.2.8", + "@ai-sdk/react": "1.2.12", + "@ai-sdk/ui-utils": "1.2.11", + "@opentelemetry/api": "1.9.0", + "jsondiffpatch": "0.6.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "react": "^18 || ^19 || ^19.0.0-rc", + "zod": "^3.23.8" + }, + "peerDependenciesMeta": { + "react": { + "optional": true + } + } + }, + "node_modules/ai-v5": { + "name": "ai", + "version": "5.0.60", + "resolved": "https://registry.npmjs.org/ai/-/ai-5.0.60.tgz", + "integrity": "sha512-80U/3kmdBW6g+JkLXpz/P2EwkyEaWlPlYtuLUpx/JYK9F7WZh9NnkYoh1KvUi1Sbpo0NyurBTvX0a2AG9mmbDA==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/gateway": "1.0.33", + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.10", + "@opentelemetry/api": "1.9.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/ai-v5/node_modules/@ai-sdk/provider": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/ai-v5/node_modules/@ai-sdk/provider-utils": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.10.tgz", + "integrity": "sha512-T1gZ76gEIwffep6MWI0QNy9jgoybUHE7TRaHB5k54K8mF91ciGFlbtCGxDYhMH3nCRergKwYFIDeFF0hJSIQHQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@standard-schema/spec": "^1.0.0", + "eventsource-parser": "^3.0.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "license": "Python-2.0" + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", + "license": "MIT" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT" + }, + "node_modules/atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==", + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/axios": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz", + "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + "version": "2.8.18", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.18.tgz", + "integrity": "sha512-UYmTpOBwgPScZpS4A+YbapwWuBwasxvO/2IOHArSsAhL/+ZdmATBXTex3t+l2hXwLVYK382ibr/nKoY9GKe86w==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, + "node_modules/bignumber.js": { + "version": "9.3.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.3.1.tgz", + "integrity": "sha512-Ko0uX15oIUS7wJ3Rb30Fs6SkVbLmPBAKdlm7q9+ak9bbIeFf0MwuBsQV6z7+X768/cHsfg+WlysDWJcmthjsjQ==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.26.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.26.3.tgz", + "integrity": "sha512-lAUU+02RFBuCKQPj/P6NgjlbCnLBMp4UtgTx7vNHd3XSIJF87s9a5rA3aH2yw3GS9DqZAUbOtZdCCiZeVRqt0w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.8.9", + "caniuse-lite": "^1.0.30001746", + "electron-to-chromium": "^1.5.227", + "node-releases": "^2.0.21", + "update-browserslist-db": "^1.1.3" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/builtins": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.1.0.tgz", + "integrity": "sha512-SW9lzGTLvWTP1AY8xeAMZimqDrIaSdLQUcVr9DMef51niJ022Ri87SwRRKYm4A6iHfkPaiVUu/Duw2Wc4J7kKg==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.0.0" + } + }, + "node_modules/builtins/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/bundle-name": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-4.1.0.tgz", + "integrity": "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==", + "license": "MIT", + "dependencies": { + "run-applescript": "^7.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001751", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001751.tgz", + "integrity": "sha512-A0QJhug0Ly64Ii3eIqHu5X51ebln3k4yTUkY1j8drqpWHVreg/VLijN48cZ1bYPiqOQuqpkIKnzr/Ul8V+p6Cw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/cjs-module-lexer": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", + "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", + "license": "MIT" + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cliui/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/cliui/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "license": "MIT" + }, + "node_modules/cliui/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/clone": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", + "integrity": "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==", + "license": "MIT", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "license": "MIT" + }, + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "license": "MIT" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", + "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==", + "dev": true, + "license": "MIT" + }, + "node_modules/confbox": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", + "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", + "license": "MIT" + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/date-fns": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-3.6.0.tgz", + "integrity": "sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/kossnocorp" + } + }, + "node_modules/dateformat": { + "version": "4.6.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.6.3.tgz", + "integrity": "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/default-browser": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-5.2.1.tgz", + "integrity": "sha512-WY/3TUME0x3KPYdRRxEJJvXRHV4PyPoUsxtZa78lwItwRQRHhd2U9xOscaT/YTf8uCXIAjeJOFBVEh/7FtD8Xg==", + "license": "MIT", + "dependencies": { + "bundle-name": "^4.1.0", + "default-browser-id": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser-id": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-5.0.0.tgz", + "integrity": "sha512-A6p/pu/6fyBcA1TRz/GqWYPViplrftcW2gZC9q79ngNCKAeR/X3gcEdXQHl4KNXV+3wgIJ1CPkJQ3IHM6lcsyA==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/define-lazy-prop": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", + "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "license": "MIT", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/dotenv": { + "version": "17.2.3", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.3.tgz", + "integrity": "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true, + "license": "MIT" + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.5.237", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.237.tgz", + "integrity": "sha512-icUt1NvfhGLar5lSWH3tHNzablaA5js3HVHacQimfP8ViEBOQv+L7DKEuHdbTZ0SKCO1ogTJTIL1Gwk9S6Qvcg==", + "dev": true, + "license": "ISC" + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true, + "license": "MIT" + }, + "node_modules/empathic": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/empathic/-/empathic-2.0.0.tgz", + "integrity": "sha512-i6UzDscO/XfAcNYD75CfICkmfLedpyPDdozrLMmQc5ORaQcdMoc21OnlEylMIqI7U8eniKrPMxxtj8k0vhmJhA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-module-lexer": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "dev": true, + "license": "MIT" + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/esbuild": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.10.tgz", + "integrity": "sha512-9RiGKvCwaqxO2owP61uQ4BgNborAQskMR6QusfWzQqv7AZOg5oGehdY2pRJMTKuwxd1IDBP4rSbI5lHzU7SMsQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.10", + "@esbuild/android-arm": "0.25.10", + "@esbuild/android-arm64": "0.25.10", + "@esbuild/android-x64": "0.25.10", + "@esbuild/darwin-arm64": "0.25.10", + "@esbuild/darwin-x64": "0.25.10", + "@esbuild/freebsd-arm64": "0.25.10", + "@esbuild/freebsd-x64": "0.25.10", + "@esbuild/linux-arm": "0.25.10", + "@esbuild/linux-arm64": "0.25.10", + "@esbuild/linux-ia32": "0.25.10", + "@esbuild/linux-loong64": "0.25.10", + "@esbuild/linux-mips64el": "0.25.10", + "@esbuild/linux-ppc64": "0.25.10", + "@esbuild/linux-riscv64": "0.25.10", + "@esbuild/linux-s390x": "0.25.10", + "@esbuild/linux-x64": "0.25.10", + "@esbuild/netbsd-arm64": "0.25.10", + "@esbuild/netbsd-x64": "0.25.10", + "@esbuild/openbsd-arm64": "0.25.10", + "@esbuild/openbsd-x64": "0.25.10", + "@esbuild/openharmony-arm64": "0.25.10", + "@esbuild/sunos-x64": "0.25.10", + "@esbuild/win32-arm64": "0.25.10", + "@esbuild/win32-ia32": "0.25.10", + "@esbuild/win32-x64": "0.25.10" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "dev": true, + "license": "MIT" + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "license": "MIT", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/eventsource": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", + "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", + "license": "MIT", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.6.tgz", + "integrity": "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/execa": { + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-9.6.0.tgz", + "integrity": "sha512-jpWzZ1ZhwUmeWRhS7Qv3mhpOhLfwI+uAX4e5fOcXqwMR7EcJ0pj2kV1CVzHVMX/LphnKWD3LObjZCoJ71lKpHw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sindresorhus/merge-streams": "^4.0.0", + "cross-spawn": "^7.0.6", + "figures": "^6.1.0", + "get-stream": "^9.0.0", + "human-signals": "^8.0.1", + "is-plain-obj": "^4.1.0", + "is-stream": "^4.0.1", + "npm-run-path": "^6.0.0", + "pretty-ms": "^9.2.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^4.0.0", + "yoctocolors": "^2.1.1" + }, + "engines": { + "node": "^18.19.0 || >=20.5.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/execa/node_modules/is-stream": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-4.0.1.tgz", + "integrity": "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/exit-hook": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/exit-hook/-/exit-hook-4.0.0.tgz", + "integrity": "sha512-Fqs7ChZm72y40wKjOFXBKg7nJZvQJmewP5/7LtePDdnah/+FH9Hp5sgMujSCMPXlxOAW2//1jrW9pnsY7o20vQ==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/express": { + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "license": "MIT", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.7.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.12", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.1.tgz", + "integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==", + "license": "MIT", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": ">= 4.11" + } + }, + "node_modules/express/node_modules/body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/express/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express/node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/express/node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/express/node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/express/node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/express/node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "license": "MIT", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/exsolve": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/exsolve/-/exsolve-1.0.7.tgz", + "integrity": "sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw==", + "dev": true, + "license": "MIT" + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "license": "MIT" + }, + "node_modules/fast-copy": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/fast-copy/-/fast-copy-3.0.2.tgz", + "integrity": "sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ==", + "license": "MIT" + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "license": "MIT" + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/figures": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-6.1.0.tgz", + "integrity": "sha512-d+l3qxjSesT4V7v2fh+QnmFnUWv9lSpjarhShNTgBOfA0ttejbQUAlHLitbjkoRiDulW0OPoQPYIGhIC8ohejg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-unicode-supported": "^2.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/find-workspaces": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/find-workspaces/-/find-workspaces-0.3.1.tgz", + "integrity": "sha512-UDkGILGJSA1LN5Aa7McxCid4sqW3/e+UYsVwyxki3dDT0F8+ym0rAfnCkEfkL0rO7M+8/mvkim4t/s3IPHmg+w==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-glob": "^3.3.2", + "pkg-types": "^1.0.3", + "yaml": "^2.3.4" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/form-data": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/formdata-node": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-6.0.3.tgz", + "integrity": "sha512-8e1++BCiTzUno9v5IZ2J6bv4RU+3UKDmqWUQD0MIMVCd9AdhWkO1gw57oo1mNEX1dMq2EGI+FbWz4B92pscSQg==", + "license": "MIT", + "engines": { + "node": ">= 18" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/forwarded-parse": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/forwarded-parse/-/forwarded-parse-2.1.2.tgz", + "integrity": "sha512-alTFZZQDKMporBH77856pXgzhEzaUVmLCDk+egLgIgHst3Tpndzz8MnKe+GzRJRfvVdn69HhpW7cmXzvtLvJAw==", + "license": "MIT" + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fs-extra": { + "version": "11.3.2", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.2.tgz", + "integrity": "sha512-Xr9F6z6up6Ws+NjzMCZc6WXg2YFRlrLP9NQDO3VQrWrfiojdhS56TzueT88ze0uBdCTwEIhQ3ptnmKeWGFAe0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gaxios": { + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz", + "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==", + "license": "Apache-2.0", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/gaxios/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/gcp-metadata": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.1.tgz", + "integrity": "sha512-a4tiq7E0/5fTjxPAaH4jpjkSv/uCaU2p5KC6HVGrvl0cDjA8iBZv4vv1gyzlmK0ZUKqwpOyQMKzZQe3lTit77A==", + "license": "Apache-2.0", + "dependencies": { + "gaxios": "^6.1.1", + "google-logging-utils": "^0.0.2", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-port": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/get-port/-/get-port-7.1.0.tgz", + "integrity": "sha512-QB9NKEeDg3xxVwCCwJQ9+xycaz6pBB6iQ76wiWMl1927n0Kir6alPiP+yuiICLLU4jpMe08dXfpebuQppFA2zw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-stream": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-9.0.1.tgz", + "integrity": "sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sec-ant/readable-stream": "^0.4.1", + "is-stream": "^4.0.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-stream/node_modules/is-stream": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-4.0.1.tgz", + "integrity": "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-tsconfig": { + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.11.0.tgz", + "integrity": "sha512-sNsqf7XKQ38IawiVGPOoAlqZo1DMrO7TU+ZcZwi7yLl7/7S0JwmoBMKz/IkUPhSoXM0Ng3vT0yB1iCe5XavDeQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/google-logging-utils": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/google-logging-utils/-/google-logging-utils-0.0.2.tgz", + "integrity": "sha512-NEgUnEcBiP5HrPzufUkBzJOD/Sxsco3rLNo1F1TNf7ieU8ryUzBhqba8r756CjLX7rn3fHl6iLEwPYuqpoKgQQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/help-me": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/help-me/-/help-me-5.0.0.tgz", + "integrity": "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg==", + "license": "MIT" + }, + "node_modules/hono": { + "version": "4.10.3", + "resolved": "https://registry.npmjs.org/hono/-/hono-4.10.3.tgz", + "integrity": "sha512-2LOYWUbnhdxdL8MNbNg9XZig6k+cZXm5IjHn2Aviv7honhBMOHb+jxrKIeJRZJRmn+htUCKhaicxwXuUDlchRA==", + "license": "MIT", + "engines": { + "node": ">=16.9.0" + } + }, + "node_modules/hono-openapi": { + "version": "0.4.8", + "resolved": "https://registry.npmjs.org/hono-openapi/-/hono-openapi-0.4.8.tgz", + "integrity": "sha512-LYr5xdtD49M7hEAduV1PftOMzuT8ZNvkyWfh1DThkLsIr4RkvDb12UxgIiFbwrJB6FLtFXLoOZL9x4IeDk2+VA==", + "license": "MIT", + "dependencies": { + "json-schema-walker": "^2.0.0" + }, + "peerDependencies": { + "@hono/arktype-validator": "^2.0.0", + "@hono/effect-validator": "^1.2.0", + "@hono/typebox-validator": "^0.2.0 || ^0.3.0", + "@hono/valibot-validator": "^0.5.1", + "@hono/zod-validator": "^0.4.1", + "@sinclair/typebox": "^0.34.9", + "@valibot/to-json-schema": "^1.0.0-beta.3", + "arktype": "^2.0.0", + "effect": "^3.11.3", + "hono": "^4.6.13", + "openapi-types": "^12.1.3", + "valibot": "^1.0.0-beta.9", + "zod": "^3.23.8", + "zod-openapi": "^4.0.0" + }, + "peerDependenciesMeta": { + "@hono/arktype-validator": { + "optional": true + }, + "@hono/effect-validator": { + "optional": true + }, + "@hono/typebox-validator": { + "optional": true + }, + "@hono/valibot-validator": { + "optional": true + }, + "@hono/zod-validator": { + "optional": true + }, + "@sinclair/typebox": { + "optional": true + }, + "@valibot/to-json-schema": { + "optional": true + }, + "arktype": { + "optional": true + }, + "effect": { + "optional": true + }, + "hono": { + "optional": true + }, + "valibot": { + "optional": true + }, + "zod": { + "optional": true + }, + "zod-openapi": { + "optional": true + } + } + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/human-signals": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-8.0.1.tgz", + "integrity": "sha512-eKCa6bwnJhvxj14kZk5NCPc6Hb6BdsU9DZcOnmQKSnO1VKrfV0zCvtttPZUsBvjmNDn8rpcJfpwSYnHBjc95MQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/import-in-the-middle": { + "version": "1.14.4", + "resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-1.14.4.tgz", + "integrity": "sha512-eWjxh735SJLFJJDs5X82JQ2405OdJeAHDBnaoFCfdr5GVc7AWc9xU7KbrF+3Xd5F2ccP1aQFKtY+65X6EfKZ7A==", + "license": "Apache-2.0", + "dependencies": { + "acorn": "^8.14.0", + "acorn-import-attributes": "^1.9.5", + "cjs-module-lexer": "^1.2.2", + "module-details-from-path": "^1.0.3" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ip-regex": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-4.3.0.tgz", + "integrity": "sha512-B9ZWJxHHOHUhUjCPrMpLD4xEq35bUTClHM1S6CBU5ixQnkZmwipwgc96vAd7AAGM9TGHvJR+Uss+/Ak6UphK+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-docker": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", + "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-inside-container": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", + "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", + "license": "MIT", + "dependencies": { + "is-docker": "^3.0.0" + }, + "bin": { + "is-inside-container": "cli.js" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-module": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz", + "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-network-error": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/is-network-error/-/is-network-error-1.3.0.tgz", + "integrity": "sha512-6oIwpsgRfnDiyEDLMay/GqCl3HoAtH5+RUKW29gYkL0QA+ipzpDLA16yQs7/RHCSu+BwgbJaOUqa4A99qNVQVw==", + "license": "MIT", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" + }, + "node_modules/is-reference": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz", + "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-unicode-supported": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz", + "integrity": "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-url": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/is-url/-/is-url-1.2.4.tgz", + "integrity": "sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-wsl": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-3.1.0.tgz", + "integrity": "sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw==", + "license": "MIT", + "dependencies": { + "is-inside-container": "^1.0.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is2": { + "version": "2.0.9", + "resolved": "https://registry.npmjs.org/is2/-/is2-2.0.9.tgz", + "integrity": "sha512-rZkHeBn9Zzq52sd9IUIV3a5mfwBY+o2HePMh0wkGBM4z4qjvy2GwVxQ6nNXSfw6MmVP6gf1QIlWjiOavhM3x5g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "ip-regex": "^4.1.0", + "is-url": "^1.2.4" + }, + "engines": { + "node": ">=v0.10.0" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/jackspeak": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/joycon": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz", + "integrity": "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/js-base64": { + "version": "3.7.7", + "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-3.7.7.tgz", + "integrity": "sha512-7rCnleh0z2CkXhH67J8K1Ytz0b2Y+yxTPL+/KOJoa20hfnVQ/3/T6W/KflYI4bRHRagNeXeU2bkNGI3v1oS/lw==", + "license": "BSD-3-Clause" + }, + "node_modules/js-tiktoken": { + "version": "1.0.21", + "resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.21.tgz", + "integrity": "sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g==", + "license": "MIT", + "dependencies": { + "base64-js": "^1.5.1" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "license": "MIT", + "dependencies": { + "bignumber.js": "^9.0.0" + } + }, + "node_modules/json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "license": "(AFL-2.1 OR BSD-3-Clause)" + }, + "node_modules/json-schema-to-zod": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/json-schema-to-zod/-/json-schema-to-zod-2.6.1.tgz", + "integrity": "sha512-uiHmWH21h9FjKJkRBntfVGTLpYlCZ1n98D0izIlByqQLqpmkQpNTBtfbdP04Na6+43lgsvrShFh2uWLkQDKJuQ==", + "license": "ISC", + "bin": { + "json-schema-to-zod": "dist/cjs/cli.js" + } + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, + "node_modules/json-schema-walker": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/json-schema-walker/-/json-schema-walker-2.0.0.tgz", + "integrity": "sha512-nXN2cMky0Iw7Af28w061hmxaPDaML5/bQD9nwm1lOoIKEGjHcRGxqWe4MfrkYThYAPjSUhmsp4bJNoLAyVn9Xw==", + "license": "MIT", + "dependencies": { + "@apidevtools/json-schema-ref-parser": "^11.1.0", + "clone": "^2.1.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/json-schema-walker/node_modules/@apidevtools/json-schema-ref-parser": { + "version": "11.9.3", + "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-11.9.3.tgz", + "integrity": "sha512-60vepv88RwcJtSHrD6MjIL6Ta3SOYbgfnkHb+ppAVK+o9mXprRtulx7VlRl3lN3bbvysAfCS7WMVfhUYemB0IQ==", + "license": "MIT", + "dependencies": { + "@jsdevtools/ono": "^7.1.3", + "@types/json-schema": "^7.0.15", + "js-yaml": "^4.1.0" + }, + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/philsturgeon" + } + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsondiffpatch": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/jsondiffpatch/-/jsondiffpatch-0.7.3.tgz", + "integrity": "sha512-zd4dqFiXSYyant2WgSXAZ9+yYqilNVvragVNkNRn2IFZKgjyULNrKRznqN4Zon0MkLueCg+3QaPVCnDAVP20OQ==", + "license": "MIT", + "dependencies": { + "@dmsnell/diff-match-patch": "^1.1.0" + }, + "bin": { + "jsondiffpatch": "bin/jsondiffpatch.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + } + }, + "node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/klavis": { + "version": "2.10.2", + "resolved": "https://registry.npmjs.org/klavis/-/klavis-2.10.2.tgz", + "integrity": "sha512-cNi+wQe+SNkDFLb/Y+UUWHepKTljxKnbgFud+UzHqs9ga973Ruvh1durs6BYzoEe7gFhJBQYEiLiq/FIxghltA==", + "dependencies": { + "form-data": "^4.0.0", + "formdata-node": "^6.0.3", + "js-base64": "3.7.7", + "node-fetch": "^2.7.0", + "qs": "^6.13.1", + "readable-stream": "^4.5.2", + "url-join": "4.0.1" + } + }, + "node_modules/local-pkg": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-1.1.2.tgz", + "integrity": "sha512-arhlxbFRmoQHl33a0Zkle/YWlmNwoyt6QNZEIJcqNbdrsix5Lvc4HyyI3EnwxTYlZYc32EbYrQ8SzEZ7dqgg9A==", + "dev": true, + "license": "MIT", + "dependencies": { + "mlly": "^1.7.4", + "pkg-types": "^2.3.0", + "quansync": "^0.2.11" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/local-pkg/node_modules/confbox": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.2.2.tgz", + "integrity": "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/local-pkg/node_modules/pkg-types": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-2.3.0.tgz", + "integrity": "sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig==", + "dev": true, + "license": "MIT", + "dependencies": { + "confbox": "^0.2.2", + "exsolve": "^1.0.7", + "pathe": "^2.0.3" + } + }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", + "license": "MIT" + }, + "node_modules/long": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", + "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==", + "license": "Apache-2.0" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/magic-string": { + "version": "0.30.19", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.19.tgz", + "integrity": "sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/mastra": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/mastra/-/mastra-0.17.0.tgz", + "integrity": "sha512-PKFqD6toWiTcoBLa6B/9Pqb1SEQNptr+4SHTWNl27gTaprmad3ZSC/b2yvRNyNG25aDBeYs0YQGoeuD81mLhxg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@clack/prompts": "^0.11.0", + "@expo/devcert": "^1.2.0", + "@mastra/deployer": "^0.21.1", + "@mastra/loggers": "^0.10.16", + "@opentelemetry/auto-instrumentations-node": "^0.62.1", + "@opentelemetry/core": "^2.0.1", + "@opentelemetry/exporter-trace-otlp-grpc": "^0.203.0", + "@opentelemetry/exporter-trace-otlp-http": "^0.203.0", + "@opentelemetry/instrumentation": "^0.203.0", + "@opentelemetry/resources": "^2.0.1", + "@opentelemetry/sdk-node": "^0.203.0", + "@opentelemetry/sdk-trace-base": "^2.0.1", + "@opentelemetry/semantic-conventions": "^1.36.0", + "@webcontainer/env": "^1.1.1", + "commander": "^12.1.0", + "dotenv": "^16.6.1", + "execa": "^9.6.0", + "fs-extra": "^11.3.2", + "get-port": "^7.1.0", + "open": "^10.1.2", + "picocolors": "^1.1.1", + "posthog-node": "4.18.0", + "prettier": "^3.6.2", + "shell-quote": "^1.8.3", + "strip-json-comments": "^5.0.3", + "tcp-port-used": "^1.0.2", + "yocto-spinner": "^0.2.3" + }, + "bin": { + "mastra": "dist/index.js" + }, + "peerDependencies": { + "@mastra/core": ">=0.21.0-0 <0.22.0-0", + "zod": "^3.25.0 || ^4.0.0" + } + }, + "node_modules/mastra/node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/micromatch/node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/mlly": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.8.0.tgz", + "integrity": "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.15.0", + "pathe": "^2.0.3", + "pkg-types": "^1.3.1", + "ufo": "^1.6.1" + } + }, + "node_modules/module-details-from-path": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.4.tgz", + "integrity": "sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w==", + "license": "MIT" + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/node-releases": { + "version": "2.0.25", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.25.tgz", + "integrity": "sha512-4auku8B/vw5psvTiiN9j1dAOsXvMoGqJuKJcR+dTdqiXEK20mMTk1UEo3HS16LeGQsVG6+qKTPM9u/qQ2LqATA==", + "dev": true, + "license": "MIT" + }, + "node_modules/npm-run-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-6.0.0.tgz", + "integrity": "sha512-9qny7Z9DsQU8Ou39ERsPU4OZQlSTP47ShQzuKZ6PRXpYLtIFgl/DEBYEXKlvcEa+9tHVcK8CF81Y2V72qaZhWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^4.0.0", + "unicorn-magic": "^0.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-exit-leak-free": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz", + "integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/open": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/open/-/open-10.2.0.tgz", + "integrity": "sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA==", + "license": "MIT", + "dependencies": { + "default-browser": "^5.2.1", + "define-lazy-prop": "^3.0.0", + "is-inside-container": "^1.0.0", + "wsl-utils": "^0.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/openapi-types": { + "version": "12.1.3", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz", + "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==", + "license": "MIT", + "peer": true + }, + "node_modules/p-map": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-7.0.3.tgz", + "integrity": "sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-retry": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-7.1.0.tgz", + "integrity": "sha512-xL4PiFRQa/f9L9ZvR4/gUCRNus4N8YX80ku8kv9Jqz+ZokkiZLM0bcvX0gm1F3PDi9SPRsww1BDsTWgE6Y1GLQ==", + "license": "MIT", + "dependencies": { + "is-network-error": "^1.1.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true, + "license": "BlueOak-1.0.0" + }, + "node_modules/parse-ms": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-4.0.0.tgz", + "integrity": "sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "license": "MIT" + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/path-to-regexp": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", + "license": "MIT" + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "license": "ISC", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.10.3.tgz", + "integrity": "sha512-6DIBgBQaTKDJyxnXaLiLR8wBpQQcGWuAESkRBX/t6OwA8YsqP+iVSiond2EDy6Y/dsGk8rh/jtax3js5NeV7JQ==", + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "license": "MIT", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pino": { + "version": "9.13.1", + "resolved": "https://registry.npmjs.org/pino/-/pino-9.13.1.tgz", + "integrity": "sha512-Szuj+ViDTjKPQYiKumGmEn3frdl+ZPSdosHyt9SnUevFosOkMY2b7ipxlEctNKPmMD/VibeBI+ZcZCJK+4DPuw==", + "license": "MIT", + "dependencies": { + "atomic-sleep": "^1.0.0", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^2.0.0", + "pino-std-serializers": "^7.0.0", + "process-warning": "^5.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", + "slow-redact": "^0.3.0", + "sonic-boom": "^4.0.1", + "thread-stream": "^3.0.0" + }, + "bin": { + "pino": "bin.js" + } + }, + "node_modules/pino-abstract-transport": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-2.0.0.tgz", + "integrity": "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw==", + "license": "MIT", + "dependencies": { + "split2": "^4.0.0" + } + }, + "node_modules/pino-pretty": { + "version": "13.1.1", + "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.1.1.tgz", + "integrity": "sha512-TNNEOg0eA0u+/WuqH0MH0Xui7uqVk9D74ESOpjtebSQYbNWJk/dIxCXIxFsNfeN53JmtWqYHP2OrIZjT/CBEnA==", + "license": "MIT", + "dependencies": { + "colorette": "^2.0.7", + "dateformat": "^4.6.3", + "fast-copy": "^3.0.2", + "fast-safe-stringify": "^2.1.1", + "help-me": "^5.0.0", + "joycon": "^3.1.1", + "minimist": "^1.2.6", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^2.0.0", + "pump": "^3.0.0", + "secure-json-parse": "^4.0.0", + "sonic-boom": "^4.0.1", + "strip-json-comments": "^5.0.2" + }, + "bin": { + "pino-pretty": "bin.js" + } + }, + "node_modules/pino-pretty/node_modules/secure-json-parse": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-4.1.0.tgz", + "integrity": "sha512-l4KnYfEyqYJxDwlNVyRfO2E4NTHfMKAWdUuA8J0yve2Dz/E/PdBepY03RvyJpssIpRFwJoCD55wA+mEDs6ByWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/pino-std-serializers": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-7.0.0.tgz", + "integrity": "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA==", + "license": "MIT" + }, + "node_modules/pkce-challenge": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", + "license": "MIT", + "engines": { + "node": ">=16.20.0" + } + }, + "node_modules/pkg-types": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz", + "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "confbox": "^0.1.8", + "mlly": "^1.7.4", + "pathe": "^2.0.1" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "license": "MIT", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/posthog-node": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/posthog-node/-/posthog-node-4.18.0.tgz", + "integrity": "sha512-XROs1h+DNatgKh/AlIlCtDxWzwrKdYDb2mOs58n4yN8BkGN9ewqeQwG5ApS4/IzwCb7HPttUkOVulkYatd2PIw==", + "dev": true, + "license": "MIT", + "dependencies": { + "axios": "^1.8.2" + }, + "engines": { + "node": ">=15.0.0" + } + }, + "node_modules/prettier": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz", + "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/pretty-ms": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-9.3.0.tgz", + "integrity": "sha512-gjVS5hOP+M3wMm5nmNOucbIrqudzs9v/57bWRHQWLYklXqoXKrVfYW2W9+glfGsqtPgpiz5WwyEEB+ksXIx3gQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parse-ms": "^4.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", + "license": "MIT", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/process-warning": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-5.0.0.tgz", + "integrity": "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/protobufjs": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.4.tgz", + "integrity": "sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==", + "hasInstallScript": true, + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true, + "license": "MIT" + }, + "node_modules/pump": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", + "license": "MIT", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/quansync": { + "version": "0.2.11", + "resolved": "https://registry.npmjs.org/quansync/-/quansync-0.2.11.tgz", + "integrity": "sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/antfu" + }, + { + "type": "individual", + "url": "https://github.com/sponsors/sxzz" + } + ], + "license": "MIT" + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/quick-format-unescaped": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz", + "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==", + "license": "MIT" + }, + "node_modules/radash": { + "version": "12.1.1", + "resolved": "https://registry.npmjs.org/radash/-/radash-12.1.1.tgz", + "integrity": "sha512-h36JMxKRqrAxVD8201FrCpyeNuUY9Y5zZwujr20fFO77tpUtGa6EZzfKw/3WaiBX95fq7+MpsuMLNdSnORAwSA==", + "license": "MIT", + "engines": { + "node": ">=14.18.0" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.1.tgz", + "integrity": "sha512-9G8cA+tuMS75+6G/TzW8OtLzmBDMo8p1JRxN5AZ+LAp8uxGA8V8GZm4GQ4/N5QNQEnLmg6SS7wyuSmbKepiKqA==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.7.0", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/raw-body/node_modules/iconv-lite": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz", + "integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/react": { + "version": "19.2.0", + "resolved": "https://registry.npmjs.org/react/-/react-19.2.0.tgz", + "integrity": "sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/readable-stream": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", + "integrity": "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==", + "license": "MIT", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/real-require": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", + "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==", + "license": "MIT", + "engines": { + "node": ">= 12.13.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-in-the-middle": { + "version": "7.5.2", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-7.5.2.tgz", + "integrity": "sha512-gAZ+kLqBdHarXB64XpAe2VCjB7rIRv+mU8tfRWziHRJ5umKsIHN2tLLv6EtMw7WCdP19S0ERVMldNvxYCHnhSQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "module-details-from-path": "^1.0.3", + "resolve": "^1.22.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/resolve.exports": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz", + "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rollup": { + "version": "4.50.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.50.2.tgz", + "integrity": "sha512-BgLRGy7tNS9H66aIMASq1qSYbAAJV6Z6WR4QYTvj5FgF15rZ/ympT1uixHXwzbZUBDbkvqUI1KR0fH1FhMaQ9w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.50.2", + "@rollup/rollup-android-arm64": "4.50.2", + "@rollup/rollup-darwin-arm64": "4.50.2", + "@rollup/rollup-darwin-x64": "4.50.2", + "@rollup/rollup-freebsd-arm64": "4.50.2", + "@rollup/rollup-freebsd-x64": "4.50.2", + "@rollup/rollup-linux-arm-gnueabihf": "4.50.2", + "@rollup/rollup-linux-arm-musleabihf": "4.50.2", + "@rollup/rollup-linux-arm64-gnu": "4.50.2", + "@rollup/rollup-linux-arm64-musl": "4.50.2", + "@rollup/rollup-linux-loong64-gnu": "4.50.2", + "@rollup/rollup-linux-ppc64-gnu": "4.50.2", + "@rollup/rollup-linux-riscv64-gnu": "4.50.2", + "@rollup/rollup-linux-riscv64-musl": "4.50.2", + "@rollup/rollup-linux-s390x-gnu": "4.50.2", + "@rollup/rollup-linux-x64-gnu": "4.50.2", + "@rollup/rollup-linux-x64-musl": "4.50.2", + "@rollup/rollup-openharmony-arm64": "4.50.2", + "@rollup/rollup-win32-arm64-msvc": "4.50.2", + "@rollup/rollup-win32-ia32-msvc": "4.50.2", + "@rollup/rollup-win32-x64-msvc": "4.50.2", + "fsevents": "~2.3.2" + } + }, + "node_modules/rollup-plugin-esbuild": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/rollup-plugin-esbuild/-/rollup-plugin-esbuild-6.2.1.tgz", + "integrity": "sha512-jTNOMGoMRhs0JuueJrJqbW8tOwxumaWYq+V5i+PD+8ecSCVkuX27tGW7BXqDgoULQ55rO7IdNxPcnsWtshz3AA==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "es-module-lexer": "^1.6.0", + "get-tsconfig": "^4.10.0", + "unplugin-utils": "^0.2.4" + }, + "engines": { + "node": ">=14.18.0" + }, + "peerDependencies": { + "esbuild": ">=0.18.0", + "rollup": "^1.20.0 || ^2.0.0 || ^3.0.0 || ^4.0.0" + } + }, + "node_modules/rollup-plugin-node-externals": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/rollup-plugin-node-externals/-/rollup-plugin-node-externals-8.1.1.tgz", + "integrity": "sha512-MEWJmXMGjo5E7o9hgAmma6XLCdU9gTVRcaaCubugTJdoJD3A91qxtxiukT9k2PeUdogtCaNehV3pvJUWrRNtwg==", + "dev": true, + "funding": [ + { + "type": "patreon", + "url": "https://patreon.com/Septh" + }, + { + "type": "paypal", + "url": "https://paypal.me/septh07" + } + ], + "license": "MIT", + "engines": { + "node": ">= 21 || ^20.6.0 || ^18.19.0" + }, + "peerDependencies": { + "rollup": "^4.0.0" + } + }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + "path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/router/node_modules/path-to-regexp": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.3.0.tgz", + "integrity": "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/run-applescript": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-7.1.0.tgz", + "integrity": "sha512-DPe5pVFaAsinSaV6QjQ6gdiedWDcRCbUuiQfQa2wmWV7+xC9bGulGI8+TdRmoFkAPaBXk8CrAbnlY2ISniJ47Q==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safe-stable-stringify": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", + "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/secure-json-parse": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz", + "integrity": "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==", + "license": "BSD-3-Clause" + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "license": "MIT", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shell-quote": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz", + "integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/sift": { + "version": "17.1.3", + "resolved": "https://registry.npmjs.org/sift/-/sift-17.1.3.tgz", + "integrity": "sha512-Rtlj66/b0ICeFzYTuNvX/EF1igRbbnGSvEyT79McoZa/DeGhMyC5pWKOEsZKnpkqtSeovd5FL/bjHWC3CIIvCQ==", + "license": "MIT" + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true, + "license": "MIT" + }, + "node_modules/slow-redact": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/slow-redact/-/slow-redact-0.3.1.tgz", + "integrity": "sha512-NvFvl1GuLZNW4U046Tfi8b26zXo8aBzgCAS2f7yVJR/fArN93mOqSA99cB9uITm92ajSz01bsu1K7SCVVjIMpQ==", + "license": "MIT" + }, + "node_modules/sonic-boom": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz", + "integrity": "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww==", + "license": "MIT", + "dependencies": { + "atomic-sleep": "^1.0.0" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/string-width-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-4.0.0.tgz", + "integrity": "sha512-aulFJcD6YK8V1G7iRB5tigAP4TsHBZZrOV8pjV++zdUwmeV8uzbY7yn6h9MswN62adStNZFuCIx4haBnRuMDaw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-json-comments": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-5.0.3.tgz", + "integrity": "sha512-1tB5mhVo7U+ETBKNf92xT4hrQa3pm0MZ0PQvuDnWgAAGHDsfp4lPSpiS6psrSiet87wyGPh9ft6wmhOMQ0hDiw==", + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/swr": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/swr/-/swr-2.3.6.tgz", + "integrity": "sha512-wfHRmHWk/isGNMwlLGlZX5Gzz/uTgo0o2IRuTMcf4CPuPFJZlq0rDaKUx+ozB5nBOReNV1kiOyzMfj+MBMikLw==", + "license": "MIT", + "dependencies": { + "dequal": "^2.0.3", + "use-sync-external-store": "^1.4.0" + }, + "peerDependencies": { + "react": "^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/tcp-port-used": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/tcp-port-used/-/tcp-port-used-1.0.2.tgz", + "integrity": "sha512-l7ar8lLUD3XS1V2lfoJlCBaeoaWo/2xfYt81hM7VlvR4RrMVFqfmzfhLVk40hAb368uitje5gPtBRL1m/DGvLA==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "4.3.1", + "is2": "^2.0.6" + } + }, + "node_modules/tcp-port-used/node_modules/debug": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/tcp-port-used/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/thread-stream": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-3.1.0.tgz", + "integrity": "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A==", + "license": "MIT", + "dependencies": { + "real-require": "^0.2.0" + } + }, + "node_modules/throttleit": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-2.1.0.tgz", + "integrity": "sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" + }, + "node_modules/tsx": { + "version": "4.20.6", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.20.6.tgz", + "integrity": "sha512-ytQKuwgmrrkDTFP4LjR0ToE2nqgy886GpvRSpU0JAnrdBYppuY5rLkRUYPU1yCryb24SsKBTL/hlDQAEFVwtZg==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "~0.25.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/type-is/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/type-is/node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/typescript-paths": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/typescript-paths/-/typescript-paths-1.5.1.tgz", + "integrity": "sha512-lYErSLCON2MSplVV5V/LBgD4UNjMgY3guATdFCZY2q1Nr6OZEu4q6zX/rYMsG1TaWqqQSszg6C9EU7AGWMDrIw==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "typescript": "^4.7.2 || ^5" + } + }, + "node_modules/ufo": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.1.tgz", + "integrity": "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==", + "dev": true, + "license": "MIT" + }, + "node_modules/undici-types": { + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.14.0.tgz", + "integrity": "sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA==", + "license": "MIT" + }, + "node_modules/unicorn-magic": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", + "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/unplugin-utils": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/unplugin-utils/-/unplugin-utils-0.2.5.tgz", + "integrity": "sha512-gwXJnPRewT4rT7sBi/IvxKTjsms7jX7QIDLOClApuZwR49SXbrB1z2NLUZ+vDHyqCj/n58OzRRqaW+B8OZi8vg==", + "dev": true, + "license": "MIT", + "dependencies": { + "pathe": "^2.0.3", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=18.12.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", + "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/url-join": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz", + "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==", + "license": "MIT" + }, + "node_modules/use-sync-external-store": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz", + "integrity": "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==", + "license": "MIT", + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/uuid": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", + "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/esm/bin/uuid" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, + "node_modules/wsl-utils": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/wsl-utils/-/wsl-utils-0.1.0.tgz", + "integrity": "sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw==", + "license": "MIT", + "dependencies": { + "is-wsl": "^3.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/xstate": { + "version": "5.22.1", + "resolved": "https://registry.npmjs.org/xstate/-/xstate-5.22.1.tgz", + "integrity": "sha512-ajh2WQw2+nM6K3ZDTBbhODb72SOLlOnd/t92jSXyOH5rTF/T0XtBIwXxbEBvsdwdnzaAaEcrxZu4aEKr/781pQ==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/xstate" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "license": "MIT", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "dev": true, + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "license": "MIT" + }, + "node_modules/yargs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yocto-spinner": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/yocto-spinner/-/yocto-spinner-0.2.3.tgz", + "integrity": "sha512-sqBChb33loEnkoXte1bLg45bEBsOP9N1kzQh5JZNKj/0rik4zAPTNSAVPj3uQAdc6slYJ0Ksc403G2XgxsJQFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yoctocolors": "^2.1.1" + }, + "engines": { + "node": ">=18.19" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yoctocolors": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yoctocolors/-/yoctocolors-2.1.2.tgz", + "integrity": "sha512-CzhO+pFNo8ajLM2d2IW/R93ipy99LWjtwblvC1RsoSUMZgyLbYFr221TnSNT7GjGdYui6P459mw9JH/g/zW2ug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zod": { + "version": "3.25.76", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", + "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-from-json-schema": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/zod-from-json-schema/-/zod-from-json-schema-0.5.0.tgz", + "integrity": "sha512-W1v1YIoimOJfvuorGGp1QroizLL3jEGELJtgrHiVg/ytxVZdh/BTTVyPypGB7YK30LHrCkkebbjuyHIjBGCEzw==", + "license": "MIT", + "dependencies": { + "zod": "^4.0.17" + } + }, + "node_modules/zod-from-json-schema-v3": { + "name": "zod-from-json-schema", + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/zod-from-json-schema/-/zod-from-json-schema-0.0.5.tgz", + "integrity": "sha512-zYEoo86M1qpA1Pq6329oSyHLS785z/mTwfr9V1Xf/ZLhuuBGaMlDGu/pDVGVUe4H4oa1EFgWZT53DP0U3oT9CQ==", + "license": "MIT", + "dependencies": { + "zod": "^3.24.2" + } + }, + "node_modules/zod-from-json-schema/node_modules/zod": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/zod/-/zod-4.1.12.tgz", + "integrity": "sha512-JInaHOamG8pt5+Ey8kGmdcAcg3OL9reK8ltczgHTAwNhMys/6ThXHityHxVV2p3fkw/c+MAvBHFVYHFZDmjMCQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.24.6", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz", + "integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.24.1" + } + } + } +} diff --git a/examples/mastra-klavis/package.json b/examples/mastra-klavis/package.json new file mode 100644 index 00000000..4030dfc3 --- /dev/null +++ b/examples/mastra-klavis/package.json @@ -0,0 +1,32 @@ +{ + "name": "mastra-klavis-example", + "version": "1.0.0", + "description": "Mastra MCP agent example using Klavis", + "main": "index.ts", + "type": "module", + "scripts": { + "dev": "mastra dev", + "build": "mastra build", + "start": "mastra start" + }, + "dependencies": { + "@mastra/core": "^0.21.1", + "@mastra/mcp": "^0.13.5", + "@ai-sdk/openai": "^1.3.23", + "klavis": "^2.10.2", + "dotenv": "^17.2.0", + "open": "^10.1.0" + }, + "devDependencies": { + "@types/node": "^24.0.13", + "typescript": "^5.8.3", + "tsx": "^4.19.0", + "mastra": "^0.17.0" + }, + "overrides": { + "jsondiffpatch": "^0.7.2" + }, + "keywords": [], + "author": "", + "license": "MIT" +} \ No newline at end of file diff --git a/examples/mastra-klavis/src/mastra/index.ts b/examples/mastra-klavis/src/mastra/index.ts new file mode 100644 index 00000000..13d3a6bc --- /dev/null +++ b/examples/mastra-klavis/src/mastra/index.ts @@ -0,0 +1,51 @@ +import { Mastra } from '@mastra/core/mastra'; +import { Agent } from '@mastra/core/agent'; +import { openai } from '@ai-sdk/openai'; +import { MCPClient } from '@mastra/mcp'; +import { KlavisClient, Klavis } from 'klavis'; +import open from 'open'; + +/** + * Creates an MCP Agent with tools from Klavis Strata server + * Strata servers support multiple MCP servers (Here use Gmail, Slack) + */ +export const createMcpAgent = async (userId: string = 'test-user'): Promise => { + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP Server with Gmail and Slack + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Gmail, Klavis.McpServerName.Slack], + userId + }); + + // Handle OAuth authorization for each service + if (response.oauthUrls) { + for (const [serverName, oauthUrl] of Object.entries(response.oauthUrls)) { + await open(oauthUrl); + console.log(`Please complete ${serverName} OAuth authorization at: ${oauthUrl}`); + } + } + + // Initialize the MCP client with Strata server URL + const mcpClient = new MCPClient({ + servers: { + strata: { + url: new URL(response.strataServerUrl) + } + } + }); + + // Create agent + return new Agent({ + name: 'MCP Agent', + instructions: `You are an AI agent with access to MCP tools.`, + model: openai('gpt-4o'), + tools: await mcpClient.getTools() + }); +}; + +const agent = await createMcpAgent(); + +export const mastra = new Mastra({ + agents: { agent } +}); \ No newline at end of file diff --git a/examples/mastra-klavis/tsconfig.json b/examples/mastra-klavis/tsconfig.json new file mode 100644 index 00000000..4aef1a7a --- /dev/null +++ b/examples/mastra-klavis/tsconfig.json @@ -0,0 +1,24 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "node", + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "allowJs": true, + "strict": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "declaration": true, + "outDir": "./dist", + "rootDir": "./", + "resolveJsonModule": true, + "lib": ["ES2022", "DOM"] + }, + "include": [ + "src/**/*" + ], + "exclude": [ + "node_modules", + ] +} \ No newline at end of file diff --git a/examples/openai-klavis/Use_Klavis_with_OpenAI.ipynb b/examples/openai-klavis/Use_Klavis_with_OpenAI.ipynb new file mode 100644 index 00000000..a9292cc1 --- /dev/null +++ b/examples/openai-klavis/Use_Klavis_with_OpenAI.ipynb @@ -0,0 +1,359 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/klavis-ai/klavis/blob/main/examples/openai/Use_Klavis_with_OpenAI.ipynb)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "# OpenAI + Klavis AI Integration\n", + "\n", + "This tutorial demonstrates how to use OpenAI function calling with Klavis MCP (Model Context Protocol) servers.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Prerequisites\n", + "\n", + "- **OpenAI API key** - Get at [openai.com](https://openai.com/)\n", + "- **Klavis API key** - Get at [klavis.ai](https://klavis.ai/)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# Install the required packages\n", + "%pip install -qU openai klavis requests\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "from openai import OpenAI\n", + "from klavis import Klavis\n", + "from klavis.types import McpServerName, ToolFormat\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = \"YOUR_OPENAI_API_KEY\" # Replace with your actual OpenAI API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"YOUR_KLAVIS_API_KEY\" # Replace with your actual Klavis API key" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Case Study 1 : OpenAI + YouTube MCP Server" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 1 - Create YouTube MCP Server using Klavis\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "\n", + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))\n", + "\n", + "youtube_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.YOUTUBE,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "# print(f\"\ud83d\udd17 YouTube MCP server created at: {youtube_mcp_instance.server_url}, and the instance id is {youtube_mcp_instance.instance_id}\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "#### Step 2 - Create general method to use MCP Server with OpenAI" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "def openai_with_mcp_server(mcp_server_url: str, user_query: str):\n", + " openai_client = OpenAI(api_key=os.getenv(\"OPENAI_API_KEY\"))\n", + "\n", + " messages = [\n", + " {\"role\": \"system\", \"content\": \"You are a helpful assistant. Use the available tools to answer the user's question.\"},\n", + " {\"role\": \"user\", \"content\": f\"{user_query}\"}\n", + " ]\n", + " \n", + " mcp_server_tools = klavis_client.mcp_server.list_tools(\n", + " server_url=mcp_server_url,\n", + " format=ToolFormat.OPENAI,\n", + " )\n", + " \n", + " response = openai_client.chat.completions.create(\n", + " model=\"gpt-4o-mini\",\n", + " messages=messages,\n", + " tools=mcp_server_tools.tools\n", + " )\n", + " \n", + " messages.append(response.choices[0].message)\n", + "\n", + " if response.choices[0].message.tool_calls:\n", + " for tool_call in response.choices[0].message.tool_calls:\n", + " function_name = tool_call.function.name\n", + " function_args = json.loads(tool_call.function.arguments)\n", + " \n", + " print(f\"\ud83d\udd27 Calling: {function_name}, with args: {function_args}\")\n", + " \n", + " result = klavis_client.mcp_server.call_tools(\n", + " server_url=mcp_server_url,\n", + " tool_name=function_name,\n", + " tool_args=function_args,\n", + " )\n", + " \n", + " messages.append({\n", + " \"role\": \"tool\",\n", + " \"tool_call_id\": tool_call.id,\n", + " \"content\": str(result)\n", + " })\n", + " \n", + " final_response = openai_client.chat.completions.create(\n", + " model=\"gpt-4o-mini\",\n", + " messages=messages\n", + " )\n", + " \n", + " return final_response.choices[0].message.content" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Step 3 - Summarize your favorite video! " + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udd27 Calling: get_youtube_video_transcript, with args: {'url': 'https://www.youtube.com/watch?v=LCEmiRjPEtQ'}\n", + "The YouTube video titled \"Andrej Karpathy: Software Is Changing (Again)\" features a keynote by Andrej Karpathy at the AI Startup School in San Francisco. The video discusses the evolution of software and introduces the concept of \"Software 3.0,\" where natural language processing becomes a new interface for programming.\n", + "\n", + "### Summary of Key Points:\n", + "\n", + "- **00:00 - Intro**: Karpathy introduces the topic of evolving software.\n", + " \n", + "- **01:25 - Software evolution: From 1.0 to 3.0**: Acknowledges that software has undergone significant changes, leading to the current phase of Software 3.0.\n", + "\n", + "- **04:40 - Programming in English: Rise of Software 3.0**: Discusses how programming is becoming more accessible with natural language, allowing users to interact with systems in English.\n", + "\n", + "- **06:10 - LLMs as utilities, fabs, and operating systems**: Highlights the role of Large Language Models (LLMs) as essential utilities in modern software.\n", + "\n", + "- **11:04 - The new LLM OS and historical computing analogies**: Compares the current state of LLMs to earlier computing eras, suggesting a transformative impact.\n", + "\n", + "- **14:39 - Psychology of LLMs: People spirits and cognitive quirks**: Mentions that LLMs reflect human-like characteristics and quirks since they are trained on human data.\n", + "\n", + "- **18:22 - Designing LLM apps with partial autonomy**: Explores how new applications can leverage LLMs' capabilities and partial autonomy.\n", + "\n", + "- **23:40 - The importance of human-AI collaboration loops**: Emphasizes the need for collaborative systems between humans and AI.\n", + "\n", + "- **26:00 - Lessons from Tesla Autopilot & autonomy sliders**: Shares insights from Tesla's experience with autonomous systems.\n", + "\n", + "- **27:52 - The Iron Man analogy: Augmentation vs. agents**: Uses the Iron Man analogy to differentiate between user augmentation and fully autonomous agents.\n", + "\n", + "- **29:06 - Vibe Coding: Everyone is now a programmer**: Hacks into the notion that traditional programming barriers are lowering, making programming accessible to more people.\n", + "\n", + "- **33:39 - Building for agents: Future-ready digital infrastructure**: Discusses the need for infrastructure that can support the integration of LLMs as central components.\n", + "\n", + "- **38:14 - Summary: We\u2019re in the 1960s of LLMs \u2014 time to build**: Concludes that we are at a pivotal moment in LLM development, akin to the early days of computing.\n", + "\n", + "### Conclusion:\n", + "Karpathy argues that we are experiencing a fundamental shift in how software operates, driven largely by the rise of LLMs and their integration into everyday applications. This change signifies a new era where programming becomes more conversational, making technology more accessible.\n" + ] + } + ], + "source": [ + "YOUTUBE_VIDEO_URL = \"https://www.youtube.com/watch?v=LCEmiRjPEtQ\" # pick a video you like!\n", + "\n", + "result = openai_with_mcp_server(\n", + " mcp_server_url=youtube_mcp_instance.server_url, \n", + " user_query=f\"Please provide a complete summary of this YouTube video with timestamp: {YOUTUBE_VIDEO_URL}\"\n", + ")\n", + "\n", + "print(result)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "\u2705 Great! You've successfully created an AI agent that uses OpenAI function calling with Klavis MCP servers to summarize YouTube videos!\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Case Study 2 : OpenAI + Gmail MCP Server (OAuth needed)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udd10 Opening OAuth authorization for Gmail, if you are not redirected, please open the following URL in your browser: https://api.klavis.ai/oauth/gmail/authorize?instance_id=d9d482b3-433a-4330-9a8b-9548c0b0a326\n" + ] + } + ], + "source": [ + "import webbrowser\n", + "\n", + "gmail_mcp_server = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.GMAIL,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "webbrowser.open(gmail_mcp_server.oauth_url)\n", + "\n", + "print(f\"\ud83d\udd10 Opening OAuth authorization for Gmail, if you are not redirected, please open the following URL in your browser: {gmail_mcp_server.oauth_url}\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udd27 Calling: send_email, with args: {'to': ['zihaolin@klavis.ai'], 'subject': 'Test OpenAI + Gmail MCP Server', 'body': 'Hello World'}\n", + "The email has been successfully sent to zihaolin@klavis.ai with the subject \"Test OpenAI + Gmail MCP Server\" and the body \"Hello World.\"\n" + ] + } + ], + "source": [ + "EMAIL_RECIPIENT = \"zihaolin@klavis.ai\" # Replace with your email\n", + "EMAIL_SUBJECT = \"Test OpenAI + Gmail MCP Server\"\n", + "EMAIL_BODY = \"Hello World\"\n", + "\n", + "result = openai_with_mcp_server(\n", + " mcp_server_url=gmail_mcp_server.server_url, \n", + " user_query=f\"Please send an email to {EMAIL_RECIPIENT} with subject {EMAIL_SUBJECT} and body {EMAIL_BODY}\"\n", + ")\n", + "\n", + "print(result)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Summary\n", + "\n", + "This tutorial demonstrated how to integrate OpenAI's function calling capabilities with Klavis MCP servers to create powerful AI applications. We covered two practical examples:\n", + "\n", + "**\ud83c\udfa5 YouTube Integration**: Built an AI assistant that can automatically summarize YouTube videos by extracting transcripts and providing detailed, timestamped summaries.\n", + "\n", + "**\ud83d\udce7 Gmail Integration**: Created an AI-powered email assistant that can send emails through Gmail with OAuth authentication.\n", + "\n", + "### Key Takeaways:\n", + "- **Easy Setup**: Klavis MCP servers can be created with just a few lines of code\n", + "- **OpenAI Compatible**: All tools are formatted for seamless OpenAI function calling\n", + "- **Versatile**: Support for both simple APIs (YouTube) and OAuth-authenticated services (Gmail)\n", + "- **Scalable**: The same pattern can be applied to any of the MCP servers available in Klavis\n", + "\n", + "**Happy building!** \ud83d\ude80\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/examples/openai-klavis/python/.env.example b/examples/openai-klavis/python/.env.example new file mode 100644 index 00000000..a8d453c5 --- /dev/null +++ b/examples/openai-klavis/python/.env.example @@ -0,0 +1,2 @@ +OPENAI_API_KEY=YOUR_OPENAI_API_KEY +KLAVIS_API_KEY=YOUR_KLAVIS_API_KEY diff --git a/examples/openai-klavis/python/main.py b/examples/openai-klavis/python/main.py new file mode 100755 index 00000000..6f115c74 --- /dev/null +++ b/examples/openai-klavis/python/main.py @@ -0,0 +1,170 @@ +import json +import os +import webbrowser +from typing import Dict, Any, List +from openai import OpenAI +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat + +def stream_chat_completion(client: OpenAI, messages: List[Dict[str, str]], klavis_client: Klavis, server_url: str) -> None: + """ + Stream chat completion from OpenAI with function calling support. + + Args: + client: OpenAI client instance + messages: List of conversation messages + klavis_client: Klavis client instance + server_url: MCP server URL + """ + try: + + tools_info = klavis_client.mcp_server.list_tools( + server_url=server_url, + format=ToolFormat.OPENAI + ) + + # Create streaming completion with function calling + stream = client.chat.completions.create( + model="gpt-4o-mini", + messages=messages, + tools=tools_info.tools, + tool_choice="auto", + stream=True, + temperature=0.7 + ) + + # Add assistant message to messages list that we'll modify in place + messages.append({"role": "assistant", "content": ""}) + tool_calls = [] + current_tool_call = None + is_tool_call = False + + print("\nšŸ¤– Assistant: ", end="", flush=True) + + for chunk in stream: + if chunk.choices[0].delta.content: + # Regular content streaming + content = chunk.choices[0].delta.content + print(content, end="", flush=True) + messages[-1]["content"] += content + + elif chunk.choices[0].delta.tool_calls: + # Tool call streaming + is_tool_call = True + delta_tool_calls = chunk.choices[0].delta.tool_calls + + for delta_tool_call in delta_tool_calls: + if delta_tool_call.index is not None: + # Ensure we have enough tool calls in our list + while len(tool_calls) <= delta_tool_call.index: + tool_calls.append({ + "id": "", + "type": "function", + "function": {"name": "", "arguments": ""} + }) + + current_tool_call = tool_calls[delta_tool_call.index] + + if delta_tool_call.id: + current_tool_call["id"] += delta_tool_call.id + if delta_tool_call.function: + if delta_tool_call.function.name: + current_tool_call["function"]["name"] += delta_tool_call.function.name + if delta_tool_call.function.arguments: + current_tool_call["function"]["arguments"] += delta_tool_call.function.arguments + + + # Handle tool calls if present + if is_tool_call and tool_calls: + for tool_call in tool_calls: + if tool_call["function"]["name"]: + print(f"\nšŸ”§ Calling function: {tool_call['function']['name']}") + + # Execute function call + function_result = klavis_client.mcp_server.call_tools( + server_url=server_url, + tool_name=tool_call["function"]["name"], + tool_args=json.loads(tool_call["function"]["arguments"]) + ) + + # Add tool call to the assistant message already in messages + messages[-1]["tool_calls"] = tool_calls + messages[-1]["content"] = messages[-1]["content"] or None + + # Add tool result message + messages.append({ + "role": "tool", + "tool_call_id": tool_call["id"], + "content": str(function_result) + }) + + # Get final response with tool results + print("\nšŸ¤– Assistant: ", end="", flush=True) + final_stream = client.chat.completions.create( + model="gpt-4o-mini", + messages=messages, + stream=True + ) + + messages.append({"role": "assistant", "content": ""}) + for chunk in final_stream: + if chunk.choices[0].delta.content: + content = chunk.choices[0].delta.content + print(content, end="", flush=True) + messages[-1]["content"] += content + + print() # Final new line + + except Exception as e: + print(f"\nāŒ Error: {e}") + +def main(): + openai_client = OpenAI(api_key=os.getenv("OPENAI_API_KEY")) + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + # Create MCP server instance + mcp_instance = klavis_client.mcp_server.create_server_instance( + server_name=McpServerName.YOUTUBE, # Close CRM as an example + user_id="1234") + + # Open OAuth authorization if needed + if hasattr(mcp_instance, 'oauth_url') and mcp_instance.oauth_url: + webbrowser.open(mcp_instance.oauth_url) + print(f"šŸ” Opening OAuth authorization for Close CRM: {mcp_instance.oauth_url}") + print("Please complete the OAuth authorization in your browser before continuing...") + input("Press Enter after completing OAuth authorization...") + + messages = [ + { + "role": "system", + "content": "You are a helpful assistant with access to various Klavis MCP tools" + } + ] + + while True: + try: + user_input = input("\nšŸ‘¤ You: ").strip() + + if user_input.lower() in ['quit', 'exit', 'q']: + print("\nšŸ‘‹ Goodbye!") + break + + if not user_input: + continue + + messages.append({"role": "user", "content": user_input}) + + stream_chat_completion(openai_client, messages, klavis_client, mcp_instance.server_url) + + except KeyboardInterrupt: + print("\n\nšŸ‘‹ Goodbye!") + break + except EOFError: + print("\n\nšŸ‘‹ Goodbye!") + break + except Exception as e: + print(f"\nāŒ Unexpected error: {e}") + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/openai-klavis/python/requirements.txt b/examples/openai-klavis/python/requirements.txt new file mode 100644 index 00000000..41a2f117 --- /dev/null +++ b/examples/openai-klavis/python/requirements.txt @@ -0,0 +1,3 @@ +openai>=1.84.0 +klavis>=1.1.0 +python-dotenv>=1.1.0 \ No newline at end of file diff --git a/examples/openai-klavis/strata-python/.env.example b/examples/openai-klavis/strata-python/.env.example new file mode 100644 index 00000000..a8d453c5 --- /dev/null +++ b/examples/openai-klavis/strata-python/.env.example @@ -0,0 +1,2 @@ +OPENAI_API_KEY=YOUR_OPENAI_API_KEY +KLAVIS_API_KEY=YOUR_KLAVIS_API_KEY diff --git a/examples/openai-klavis/strata-python/main.py b/examples/openai-klavis/strata-python/main.py new file mode 100755 index 00000000..35c3cc9d --- /dev/null +++ b/examples/openai-klavis/strata-python/main.py @@ -0,0 +1,116 @@ +import json +import os +import webbrowser +from typing import Dict, Any, List +from openai import OpenAI +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat + +from dotenv import load_dotenv +load_dotenv() + +def chat_completion(openai_client: OpenAI, messages: List[Dict[str, str]], klavis_client: Klavis, user_id: str, servers: List[McpServerName]) -> None: + """ + Non-streaming chat completion using OpenAI SDK with function calling support. + Handles multiple rounds of tool calls until a final response is ready. + Creates Strata server and handles OAuth within the method. + """ + # Create Strata server with the provided servers and user_id + response = klavis_client.mcp_server.create_strata_server( + servers=servers, + user_id=user_id + ) + + if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + user_integration_auth = klavis_client.user.get_user_auth( + user_id=user_id, + server_name=server_name + ) + if not user_integration_auth.is_authenticated: # only if not authenticated, open the oauth url and wait for the user to complete the authorization + webbrowser.open(oauth_url) + input(f"Press Enter after completing {server_name} OAuth authorization...") + + server_url = response.strata_server_url + + tools_info = klavis_client.mcp_server.list_tools( + server_url=server_url, + format=ToolFormat.OPENAI + ) + + max_iterations = 20 + iteration = 0 + + while iteration < max_iterations: + iteration += 1 + + # Use OpenAI SDK for chat completion + response = openai_client.chat.completions.create( + model="gpt-4o-mini", + messages=messages, + tools=tools_info.tools, + tool_choice="auto" + ) + + assistant_message = response.choices[0].message + + if assistant_message.tool_calls: + messages.append({ + "role": "assistant", + "content": assistant_message.content, + "tool_calls": [ + { + "id": tc.id, + "type": "function", + "function": { + "name": tc.function.name, + "arguments": tc.function.arguments + } + } + for tc in assistant_message.tool_calls + ] + }) + + for tool_call in assistant_message.tool_calls: + tool_name = tool_call.function.name + tool_args = json.loads(tool_call.function.arguments) + + print(f"Calling: {tool_name}") + print(f"Arguments: {json.dumps(tool_args, indent=2)}") + + function_result = klavis_client.mcp_server.call_tools( + server_url=server_url, + tool_name=tool_name, + tool_args=tool_args + ) + + messages.append({ + "role": "tool", + "tool_call_id": tool_call.id, + "content": str(function_result) + }) + continue + else: + messages.append({"role": "assistant", "content": assistant_message.content}) + print(f"\nšŸ¤– Assistant: {assistant_message.content}") + break + + +def main(): + openai_client = OpenAI(api_key=os.getenv("OPENAI_API_KEY")) + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + servers = [McpServerName.GMAIL] + user_id = "54321" + + messages = [{"role": "system", "content": "You are a helpful assistant with access to various MCP tools"}] + + while True: + user_input = input("\nšŸ‘¤ You: ").strip() + if user_input: + messages.append({"role": "user", "content": user_input}) + chat_completion(openai_client, messages, klavis_client, user_id, servers) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/openai-klavis/strata-python/requests_main.py b/examples/openai-klavis/strata-python/requests_main.py new file mode 100644 index 00000000..ea92df7e --- /dev/null +++ b/examples/openai-klavis/strata-python/requests_main.py @@ -0,0 +1,249 @@ +import json +import os +import webbrowser +from typing import Dict, Any, List +import requests +from openai import OpenAI +from dotenv import load_dotenv + +load_dotenv() + +KLAVIS_API_BASE_URL = "https://api.klavis.ai/" + + +class KlavisClient: + """Klavis API client using requests library""" + + def __init__(self, api_key: str): + self.api_key = api_key + self.base_url = KLAVIS_API_BASE_URL + self.headers = { + "Authorization": f"Bearer {api_key}", + "Content-Type": "application/json" + } + + def create_strata_server(self, servers: List[str], user_id: str) -> Dict[str, Any]: + """Create a Strata MCP server instance.""" + url = f"{self.base_url}/mcp-server/strata/create" + payload = {"userId": user_id, "servers": servers} + response = requests.post(url, headers=self.headers, json=payload) + response.raise_for_status() + return response.json() + + def list_tools(self, server_url: str, format: str = "openai") -> Dict[str, Any]: + """List all available tools from the MCP server.""" + url = f"{self.base_url}/mcp-server/list-tools" + payload = {"serverUrl": server_url, "format": format} + response = requests.post(url, headers=self.headers, json=payload) + response.raise_for_status() + return response.json() + + def call_tool(self, server_url: str, tool_name: str, tool_args: Dict[str, Any]) -> Dict[str, Any]: + """Call a tool on the MCP server.""" + url = f"{self.base_url}/mcp-server/call-tool" + payload = {"serverUrl": server_url, "toolName": tool_name, "toolArgs": tool_args} + response = requests.post(url, headers=self.headers, json=payload) + response.raise_for_status() + return response.json() + + +def stream_chat_completion(client: OpenAI, messages: List[Dict[str, str]], klavis_client: KlavisClient, server_url: str) -> None: + """Stream chat completion from OpenAI with function calling support.""" + tools_response = klavis_client.list_tools(server_url=server_url, format="openai") + tools_info = tools_response.get("tools", []) + + max_iterations = 20 + iteration = 0 + + while iteration < max_iterations: + iteration += 1 + + stream = client.chat.completions.create( + model="gpt-4.1", + messages=messages, + tools=tools_info, + tool_choice="auto", + stream=True, + ) + + tool_calls = [] + is_tool_call = False + content = "" + + if iteration == 1 or messages[-1].get("role") == "tool": + print("\nšŸ¤– Assistant: ", end="", flush=True) + + for chunk in stream: + if chunk.choices[0].delta.content: + chunk_content = chunk.choices[0].delta.content + print(chunk_content, end="", flush=True) + content += chunk_content + + elif chunk.choices[0].delta.tool_calls: + is_tool_call = True + + for delta_tool_call in chunk.choices[0].delta.tool_calls: + if delta_tool_call.index is not None: + while len(tool_calls) <= delta_tool_call.index: + tool_calls.append({"id": "", "type": "function", "function": {"name": "", "arguments": ""}}) + + current_tool_call = tool_calls[delta_tool_call.index] + + if delta_tool_call.id: + current_tool_call["id"] += delta_tool_call.id + if delta_tool_call.function: + if delta_tool_call.function.name: + current_tool_call["function"]["name"] += delta_tool_call.function.name + if delta_tool_call.function.arguments: + current_tool_call["function"]["arguments"] += delta_tool_call.function.arguments + + if is_tool_call and tool_calls: + messages.append({ + "role": "assistant", + "content": content or None, + "tool_calls": tool_calls + }) + + for tool_call in tool_calls: + if tool_call["function"]["name"]: + tool_name = tool_call["function"]["name"] + tool_args = json.loads(tool_call["function"]["arguments"]) + + print(f"\nšŸ”§ Calling tool: {tool_name}") + print(f" Arguments: {json.dumps(tool_args, indent=2)}") + + try: + function_response = klavis_client.call_tool( + server_url=server_url, + tool_name=tool_name, + tool_args=tool_args + ) + + if function_response.get("success"): + result = function_response.get("result", {}) + function_result = result.get("content", result) + else: + function_result = {"error": function_response.get("error", "Unknown error")} + + except Exception as e: + function_result = {"error": str(e)} + + messages.append({ + "role": "tool", + "tool_call_id": tool_call["id"], + "content": json.dumps(function_result) if isinstance(function_result, dict) else str(function_result) + }) + continue + else: + if content: + messages.append({"role": "assistant", "content": content}) + print() + break + + +def chat_completion(openai_api_key: str, messages: List[Dict[str, str]], klavis_client: KlavisClient, server_url: str) -> None: + """Non-streaming chat completion from OpenAI with function calling support using HTTP requests.""" + tools_response = klavis_client.list_tools(server_url=server_url, format="openai") + tools_info = tools_response.get("tools", []) + + max_iterations = 10 + iteration = 0 + + openai_url = "https://api.openai.com/v1/chat/completions" + openai_headers = { + "Authorization": f"Bearer {openai_api_key}", + "Content-Type": "application/json" + } + + while iteration < max_iterations: + iteration += 1 + + payload = { + "model": "gpt-4.1", + "messages": messages, + "tools": tools_info, + "tool_choice": "auto" + } + + response = requests.post(openai_url, headers=openai_headers, json=payload) + response.raise_for_status() + response_data = response.json() + + assistant_message = response_data["choices"][0]["message"] + + if assistant_message.get("tool_calls"): + messages.append({ + "role": "assistant", + "content": assistant_message.get("content"), + "tool_calls": assistant_message["tool_calls"] + }) + + for tool_call in assistant_message["tool_calls"]: + tool_name = tool_call["function"]["name"] + tool_args = json.loads(tool_call["function"]["arguments"]) + + print(f"\nšŸ”§ Calling tool: {tool_name}") + print(f" Arguments: {json.dumps(tool_args, indent=2)}") + + try: + function_response = klavis_client.call_tool( + server_url=server_url, + tool_name=tool_name, + tool_args=tool_args + ) + + if function_response.get("success"): + result = function_response.get("result", {}) + function_result = result.get("content", result) + else: + function_result = {"error": function_response.get("error", "Unknown error")} + + except Exception as e: + function_result = {"error": str(e)} + + messages.append({ + "role": "tool", + "tool_call_id": tool_call["id"], + "content": json.dumps(function_result) if isinstance(function_result, dict) else str(function_result) + }) + continue + else: + messages.append({"role": "assistant", "content": assistant_message.get("content")}) + print(f"\nšŸ¤– Assistant: {assistant_message.get('content')}") + break + + +def main(): + openai_client = OpenAI(api_key=os.getenv("OPENAI_API_KEY")) + klavis_client = KlavisClient(api_key=os.getenv("KLAVIS_API_KEY")) + + response = klavis_client.create_strata_server( + servers=["github"], + user_id="4321" + ) + + github_oauth_url = response.get("oauthUrls", {}).get("github") + if github_oauth_url: + webbrowser.open(github_oauth_url) + input(f"Press Enter after completing GitHub OAuth authorization...") + + all_oauth_urls = response.get("oauthUrls", {}) + if all_oauth_urls: + for server_name, oauth_url in all_oauth_urls.items(): + webbrowser.open(oauth_url) + input(f"Press Enter after completing {server_name} OAuth authorization...") + + server_url = response.get("strataServerUrl") + messages = [{"role": "system", "content": "You are a helpful assistant with access to various MCP tools"}] + + while True: + user_input = input("\nšŸ‘¤ You: ").strip() + if not user_input: + continue + + messages.append({"role": "user", "content": user_input}) + stream_chat_completion(openai_client, messages, klavis_client, server_url) + + +if __name__ == "__main__": + main() diff --git a/examples/openai-klavis/strata-python/requirements.txt b/examples/openai-klavis/strata-python/requirements.txt new file mode 100644 index 00000000..d6e8efd9 --- /dev/null +++ b/examples/openai-klavis/strata-python/requirements.txt @@ -0,0 +1,4 @@ +openai>=1.109.1 +klavis==2.9.0 +python-dotenv>=1.1.0 +requests>=2.31.0 \ No newline at end of file diff --git a/examples/openai-klavis/typescript/.env.example b/examples/openai-klavis/typescript/.env.example new file mode 100644 index 00000000..a8d453c5 --- /dev/null +++ b/examples/openai-klavis/typescript/.env.example @@ -0,0 +1,2 @@ +OPENAI_API_KEY=YOUR_OPENAI_API_KEY +KLAVIS_API_KEY=YOUR_KLAVIS_API_KEY diff --git a/examples/openai-klavis/typescript/demo.ts b/examples/openai-klavis/typescript/demo.ts new file mode 100644 index 00000000..e7b7bc93 --- /dev/null +++ b/examples/openai-klavis/typescript/demo.ts @@ -0,0 +1,148 @@ +import * as dotenv from 'dotenv'; +import OpenAI from 'openai'; +import { KlavisClient, Klavis } from 'klavis'; +import open from 'open'; + +// Load environment variables +dotenv.config(); + +const openaiApiKey = process.env.OPENAI_API_KEY; +const klavisApiKey = process.env.KLAVIS_API_KEY; + +if (!openaiApiKey) { + throw new Error('OPENAI_API_KEY is not set in the environment variables.'); +} +if (!klavisApiKey) { + throw new Error('KLAVIS_API_KEY is not set in the environment variables.'); +} + +const openaiClient = new OpenAI({ apiKey: openaiApiKey }); +const klavisClient = new KlavisClient({ apiKey: klavisApiKey }); + +type Message = { + role: 'system' | 'user' | 'assistant' | 'tool'; + content: string | null; + tool_calls?: any[]; + tool_call_id?: string; +}; + +/** + * General method to use MCP Server with OpenAI + */ +async function openaiWithMcpServer(mcpServerUrl: string, userQuery: string): Promise { + const messages: Message[] = [ + { role: "system", content: "You are a helpful assistant. Use the available tools to answer the user's question." }, + { role: "user", content: userQuery } + ]; + + // Get tools from MCP server + const mcpServerTools = await klavisClient.mcpServer.listTools({ + serverUrl: mcpServerUrl, + format: Klavis.ToolFormat.Openai + }); + + const openaiTools = mcpServerTools.tools || []; + + const response = await openaiClient.chat.completions.create({ + model: "gpt-4o-mini", + messages: messages as any, + tools: mcpServerTools.tools as any + }); + + messages.push(response.choices[0].message as any); + + // Handle tool calls if any + if (response.choices[0].message.tool_calls) { + for (const toolCall of response.choices[0].message.tool_calls) { + const functionName = toolCall.function.name; + const functionArgs = JSON.parse(toolCall.function.arguments); + + console.log(`šŸ”§ Calling: ${functionName}, with args:`, functionArgs); + + // Call the tool via Klavis + const result = await klavisClient.mcpServer.callTools({ + serverUrl: mcpServerUrl, + toolName: functionName, + toolArgs: functionArgs + }); + + messages.push({ + role: "tool", + tool_call_id: toolCall.id, + content: JSON.stringify(result) + }); + } + } + + // Final completion with tool results + const finalResponse = await openaiClient.chat.completions.create({ + model: "gpt-4o-mini", + messages: messages as any + }); + + return finalResponse.choices[0].message.content || ""; +} + +/** + * Gmail Integration Demo (OAuth required) + */ +async function demoGmailIntegration() { + console.log("\nšŸ“§ === OpenAI + Gmail MCP Server Demo (OAuth needed) ===\n"); + + // Step 1: Create Gmail MCP Server + console.log("šŸ“Ø Creating Gmail MCP server instance..."); + const gmailMcpServer = await klavisClient.mcpServer.createServerInstance({ + serverName: Klavis.McpServerName.Gmail, + userId: "1234"}); + + // Step 2: Handle OAuth authentication + if (gmailMcpServer.oauthUrl) { + console.log("šŸ” Opening OAuth authorization for Gmail..."); + await open(gmailMcpServer.oauthUrl); + } + + console.log("ā³ Please complete the OAuth authorization in your browser."); + console.log("šŸ“‹ Press Enter after completing OAuth authorization..."); + + // Wait for user input (press enter to continue) + await new Promise(resolve => { + process.stdin.once('data', () => resolve(true)); + }); + + // Step 3: Send test email + const EMAIL_RECIPIENT = "zihaolin@klavis.ai"; // Replace with your email + const EMAIL_SUBJECT = "Test OpenAI + Gmail MCP Server"; + const EMAIL_BODY = "Hello World from TypeScript demo!"; + + try { + const result = await openaiWithMcpServer( + gmailMcpServer.serverUrl, + `Please send an email to ${EMAIL_RECIPIENT} with subject "${EMAIL_SUBJECT}" and body "${EMAIL_BODY}"` + ); + + console.log("šŸ“§ Email Result:"); + console.log(result); + console.log("\nāœ… Gmail integration completed successfully!\n"); + } catch (error) { + console.error("āŒ Gmail integration failed:", error); + console.log("šŸ’” Make sure you've completed the OAuth authorization process.\n"); + } +} + +async function main() { + try { + await demoGmailIntegration(); + + console.log("šŸŽ‰ Demo completed successfully!"); + console.log("šŸ› ļø You can now build powerful AI applications with OpenAI + Klavis MCP servers!"); + + } catch (error) { + console.error("āŒ Demo failed:", error); + process.exit(1); + } +} + +// Run the demo +if (require.main === module) { + main().catch(console.error); +} diff --git a/examples/openai-klavis/typescript/package-lock.json b/examples/openai-klavis/typescript/package-lock.json new file mode 100644 index 00000000..e2437a8a --- /dev/null +++ b/examples/openai-klavis/typescript/package-lock.json @@ -0,0 +1,1073 @@ +{ + "name": "openai-example-typescript", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "openai-example-typescript", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "axios": "^1.12.0", + "dotenv": "^16.4.5", + "klavis": "^0.1.1", + "open": "^8.4.0", + "openai": "^4.52.7", + "readline-sync": "^1.4.10" + }, + "devDependencies": { + "@types/node": "^20.19.0", + "@types/readline-sync": "^1.4.8", + "ts-node": "^10.9.2", + "typescript": "^5.5.3" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", + "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "20.19.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.0.tgz", + "integrity": "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q==", + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@types/node-fetch": { + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.12.tgz", + "integrity": "sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==", + "license": "MIT", + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.0" + } + }, + "node_modules/@types/readline-sync": { + "version": "1.4.8", + "resolved": "https://registry.npmjs.org/@types/readline-sync/-/readline-sync-1.4.8.tgz", + "integrity": "sha512-BL7xOf0yKLA6baAX6MMOnYkoflUyj/c7y3pqMRfU0va7XlwHAOTOIo4x55P/qLfMsuaYdJJKubToLqRVmRtRZA==", + "dev": true, + "license": "MIT" + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "license": "MIT", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/acorn": { + "version": "8.14.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", + "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/agentkeepalive": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.6.0.tgz", + "integrity": "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==", + "license": "MIT", + "dependencies": { + "humanize-ms": "^1.2.1" + }, + "engines": { + "node": ">= 8.0.0" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "license": "MIT" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT" + }, + "node_modules/axios": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.0.tgz", + "integrity": "sha512-oXTDccv8PcfjZmPGlWsPSwtOJCZ/b6W5jAMCNcfwJbCzDckwG0jrYJFaWH1yvivfCXjVzV/SPDEhMB3Q+DSurg==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/define-lazy-prop": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/dotenv": { + "version": "16.5.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.5.0.tgz", + "integrity": "sha512-m/C+AwOAr9/W1UOIZUo232ejMNnJAJtYQjUbHoNTBNTJSvqzzDh7vnrei3o3r3m9blf6ZoDkvcw0VmozNRFJxg==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "license": "MIT", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.9", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", + "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/form-data-encoder": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz", + "integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==", + "license": "MIT" + }, + "node_modules/formdata-node": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz", + "integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==", + "license": "MIT", + "dependencies": { + "node-domexception": "1.0.0", + "web-streams-polyfill": "4.0.0-beta.3" + }, + "engines": { + "node": ">= 12.20" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/humanize-ms": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", + "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.0.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "license": "MIT", + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/js-base64": { + "version": "3.7.7", + "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-3.7.7.tgz", + "integrity": "sha512-7rCnleh0z2CkXhH67J8K1Ytz0b2Y+yxTPL+/KOJoa20hfnVQ/3/T6W/KflYI4bRHRagNeXeU2bkNGI3v1oS/lw==", + "license": "BSD-3-Clause" + }, + "node_modules/klavis": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/klavis/-/klavis-0.1.1.tgz", + "integrity": "sha512-bQT36fq0a2dB6FJ1HcI/Rls9+ZvA+cQoeXp8Y41QhDccoAS5WAwdIai88dUTcstRQw1dI0je5ETFZj4e6zOWrQ==", + "dependencies": { + "form-data": "^4.0.0", + "formdata-node": "^6.0.3", + "js-base64": "3.7.7", + "node-fetch": "^2.7.0", + "qs": "^6.13.1", + "readable-stream": "^4.5.2", + "url-join": "4.0.1" + } + }, + "node_modules/klavis/node_modules/formdata-node": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-6.0.3.tgz", + "integrity": "sha512-8e1++BCiTzUno9v5IZ2J6bv4RU+3UKDmqWUQD0MIMVCd9AdhWkO1gw57oo1mNEX1dMq2EGI+FbWz4B92pscSQg==", + "license": "MIT", + "engines": { + "node": ">= 18" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "deprecated": "Use your platform's native DOMException instead", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "license": "MIT", + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/open": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", + "license": "MIT", + "dependencies": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/openai": { + "version": "4.104.0", + "resolved": "https://registry.npmjs.org/openai/-/openai-4.104.0.tgz", + "integrity": "sha512-p99EFNsA/yX6UhVO93f5kJsDRLAg+CTA2RBqdHK4RtK8u5IJw32Hyb2dTGKbnnFmnuoBv5r7Z2CURI9sGZpSuA==", + "license": "Apache-2.0", + "dependencies": { + "@types/node": "^18.11.18", + "@types/node-fetch": "^2.6.4", + "abort-controller": "^3.0.0", + "agentkeepalive": "^4.2.1", + "form-data-encoder": "1.7.2", + "formdata-node": "^4.3.2", + "node-fetch": "^2.6.7" + }, + "bin": { + "openai": "bin/cli" + }, + "peerDependencies": { + "ws": "^8.18.0", + "zod": "^3.23.8" + }, + "peerDependenciesMeta": { + "ws": { + "optional": true + }, + "zod": { + "optional": true + } + } + }, + "node_modules/openai/node_modules/@types/node": { + "version": "18.19.111", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.111.tgz", + "integrity": "sha512-90sGdgA+QLJr1F9X79tQuEut0gEYIfkX9pydI4XGRgvFo9g2JWswefI+WUSUHPYVBHYSEfTEqBxA5hQvAZB3Mw==", + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/openai/node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "license": "MIT" + }, + "node_modules/process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", + "license": "MIT", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "license": "MIT" + }, + "node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/readable-stream": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", + "integrity": "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==", + "license": "MIT", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/readline-sync": { + "version": "1.4.10", + "resolved": "https://registry.npmjs.org/readline-sync/-/readline-sync-1.4.10.tgz", + "integrity": "sha512-gNva8/6UAe8QYepIQH/jQ2qn91Qj0B9sYjMBBs3QOB8F2CXcKgLxQaJRP76sWVRQt+QU+8fAkCbCvjjMFu7Ycw==", + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" + }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "license": "MIT" + }, + "node_modules/url-join": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz", + "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==", + "license": "MIT" + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "license": "MIT" + }, + "node_modules/web-streams-polyfill": { + "version": "4.0.0-beta.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz", + "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + } + } +} diff --git a/examples/openai-klavis/typescript/package.json b/examples/openai-klavis/typescript/package.json new file mode 100644 index 00000000..e6be1198 --- /dev/null +++ b/examples/openai-klavis/typescript/package.json @@ -0,0 +1,29 @@ +{ + "name": "openai-example-typescript", + "version": "1.0.0", + "description": "TypeScript example for OpenAI function calling with Klavis", + "main": "main.js", + "scripts": { + "start": "ts-node main.ts", + "demo": "ts-node demo.ts", + "build": "tsc", + "debug": "node --inspect-brk -r ts-node/register main.ts", + "debug:compiled": "tsc && node --inspect-brk dist/main.js" + }, + "author": "", + "license": "ISC", + "dependencies": { + "axios": "^1.12.0", + "dotenv": "^16.4.5", + "klavis": "^0.1.1", + "open": "^8.4.0", + "openai": "^4.52.7", + "readline-sync": "^1.4.10" + }, + "devDependencies": { + "@types/node": "^20.19.0", + "@types/readline-sync": "^1.4.8", + "ts-node": "^10.9.2", + "typescript": "^5.5.3" + } +} diff --git a/examples/openai-klavis/typescript/tsconfig.json b/examples/openai-klavis/typescript/tsconfig.json new file mode 100644 index 00000000..51b451e2 --- /dev/null +++ b/examples/openai-klavis/typescript/tsconfig.json @@ -0,0 +1,21 @@ +{ + "compilerOptions": { + "target": "es2020", + "module": "commonjs", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "outDir": "./dist", + "rootDir": "./", + "sourceMap": true, + "declaration": true, + "removeComments": false + }, + "include": [ + "*.ts" + ], + "exclude": [ + "node_modules" + ] +} \ No newline at end of file diff --git a/examples/strata-cookbooks/python/claude/.env.example b/examples/strata-cookbooks/python/claude/.env.example new file mode 100644 index 00000000..1a09ec43 --- /dev/null +++ b/examples/strata-cookbooks/python/claude/.env.example @@ -0,0 +1,7 @@ +# Klavis API Configuration +# Get your API key from: https://klavis.ai/home +KLAVIS_API_KEY=your_klavis_api_key_here + +# Anthropic API Configuration +# Get your API key from: https://console.anthropic.com/ +ANTHROPIC_API_KEY=your_anthropic_api_key_here diff --git a/examples/strata-cookbooks/python/claude/Use_Strata_with_Claude.ipynb b/examples/strata-cookbooks/python/claude/Use_Strata_with_Claude.ipynb new file mode 100644 index 00000000..2f43857b --- /dev/null +++ b/examples/strata-cookbooks/python/claude/Use_Strata_with_Claude.ipynb @@ -0,0 +1,229 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "cell-0", + "metadata": {}, + "source": [ + "# Claude + Strata Integration\n", + "\n", + "This tutorial demonstrates how to build AI agents using Anthropic's Claude with Klavis Strata MCP servers for enhanced functionality." + ] + }, + { + "cell_type": "markdown", + "id": "cell-1", + "metadata": {}, + "source": [ + "## Prerequisites\n", + "\n", + "Before we begin, you'll need:\n", + "\n", + "- **Anthropic API key** - Get at [console.anthropic.com](https://console.anthropic.com/)\n", + "- **Klavis API key** - Get at [klavis.ai](https://klavis.ai/)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cell-2", + "metadata": {}, + "outputs": [], + "source": [ + "# Install the required packages\n", + "%pip install -q klavis python-dotenv anthropic" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cell-3", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import webbrowser\n", + "from klavis import Klavis\n", + "from klavis.types import McpServerName, ToolFormat\n", + "from anthropic import Anthropic\n", + "\n", + "# Set environment variables\n", + "os.environ[\"ANTHROPIC_API_KEY\"] = \"YOUR_ANTHROPIC_API_KEY\" # Replace with your actual Anthropic API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"YOUR_KLAVIS_API_KEY\" # Replace with your actual Klavis API key" + ] + }, + { + "cell_type": "markdown", + "id": "cell-4", + "metadata": {}, + "source": [ + "## Step 1: Create Strata MCP Server\n", + "\n", + "Create a unified MCP server that combines multiple services (Gmail and Slack) for enhanced agent capabilities." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cell-5", + "metadata": {}, + "outputs": [], + "source": [ + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))\n", + "\n", + "# Create a Strata MCP server with Gmail and Slack integrations\n", + "response = klavis_client.mcp_server.create_strata_server(\n", + " user_id=\"1234\",\n", + " servers=[McpServerName.GMAIL, McpServerName.SLACK],\n", + ")\n", + "\n", + "print(f\"šŸš€ Strata MCP server created successfully!\")\n", + "\n", + "# Handle OAuth authorization if needed\n", + "if response.oauth_urls:\n", + " for server_name, oauth_url in response.oauth_urls.items():\n", + " webbrowser.open(oauth_url)\n", + " print(f\"šŸ” Opening OAuth authorization for {server_name}\")\n", + " input(f\"Press Enter after completing {server_name} OAuth authorization...\")" + ] + }, + { + "cell_type": "markdown", + "id": "cell-6", + "metadata": {}, + "source": [ + "## Step 2: Setup Claude with MCP Tools\n", + "\n", + "Set up Claude to use tools from the Strata MCP server." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cell-7", + "metadata": {}, + "outputs": [], + "source": [ + "# Initialize Claude client\n", + "claude_client = Anthropic(api_key=os.getenv(\"ANTHROPIC_API_KEY\"))\n", + "\n", + "# Get MCP server tools in Anthropic format\n", + "mcp_server_tools = klavis_client.mcp_server.list_tools(\n", + " server_url=response.strata_server_url,\n", + " format=ToolFormat.ANTHROPIC\n", + ")\n", + "\n", + "print(f\"šŸ¤– Claude configured with {len(mcp_server_tools.tools)} MCP tools!\")" + ] + }, + { + "cell_type": "markdown", + "id": "cell-8", + "metadata": {}, + "source": [ + "## Step 3: Create Agent Loop\n", + "\n", + "Implement an agentic loop that allows Claude to use MCP tools to complete tasks." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cell-9", + "metadata": {}, + "outputs": [], + "source": [ + "# Define user query\n", + "user_query = \"Check my latest 5 emails and summarize them in a Slack message to #general channel\"\n", + "\n", + "messages = [\n", + " {\"role\": \"user\", \"content\": user_query}\n", + "]\n", + "\n", + "# Agent loop to handle tool calls\n", + "max_iterations = 10\n", + "iteration = 0\n", + "\n", + "while iteration < max_iterations:\n", + " iteration += 1\n", + " \n", + " claude_response = claude_client.messages.create(\n", + " model=\"claude-sonnet-4-5-20250929\",\n", + " max_tokens=4000,\n", + " system=\"You are a helpful assistant. Use the available tools to answer the user's question.\",\n", + " messages=messages,\n", + " tools=mcp_server_tools.tools\n", + " )\n", + " \n", + " messages.append({\"role\": \"assistant\", \"content\": claude_response.content})\n", + " \n", + " if claude_response.stop_reason == \"tool_use\":\n", + " tool_results = []\n", + " \n", + " for content_block in claude_response.content:\n", + " if content_block.type == \"tool_use\":\n", + " function_name = content_block.name\n", + " function_args = content_block.input\n", + " \n", + " print(f\"šŸ”§ Calling: {function_name}, with args: {function_args}\")\n", + " \n", + " result = klavis_client.mcp_server.call_tools(\n", + " server_url=response.strata_server_url,\n", + " tool_name=function_name,\n", + " tool_args=function_args\n", + " )\n", + " \n", + " tool_results.append({\n", + " \"type\": \"tool_result\",\n", + " \"tool_use_id\": content_block.id,\n", + " \"content\": str(result)\n", + " })\n", + " \n", + " messages.append({\"role\": \"user\", \"content\": tool_results})\n", + " continue\n", + " else:\n", + " final_response = claude_response.content[0].text\n", + " print(\"\\nāœ… Final Response:\")\n", + " print(final_response)\n", + " break" + ] + }, + { + "cell_type": "markdown", + "id": "cell-10", + "metadata": {}, + "source": [ + "## Summary\n", + "\n", + "šŸŽ‰ Congratulations! You've successfully created a Claude agent that can:\n", + "\n", + "1. **Read emails** using the Gmail MCP server\n", + "2. **Send Slack messages** using the Slack MCP server\n", + "3. **Coordinate multiple services** through Klavis Strata MCP integration\n", + "\n", + "This demonstrates the power of combining Claude's advanced reasoning capabilities with Klavis MCP servers for building sophisticated AI workflows that can interact with multiple external services seamlessly." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/strata-cookbooks/python/claude/main.py b/examples/strata-cookbooks/python/claude/main.py new file mode 100644 index 00000000..9201bfea --- /dev/null +++ b/examples/strata-cookbooks/python/claude/main.py @@ -0,0 +1,95 @@ +import os +import asyncio +import webbrowser + +from klavis import Klavis +from klavis.types import McpServerName, ToolFormat +from anthropic import Anthropic + +from dotenv import load_dotenv +load_dotenv() + +async def main(): + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + # Step 1: Create a Strata MCP server with Gmail and Slack integrations + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GMAIL, McpServerName.SLACK], + user_id="1234" + ) + + # Step 2: Handle OAuth authorization for each services + if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + input(f"Press Enter after completing {server_name} OAuth authorization...") + + # Step 3: Setup Claude client + claude_client = Anthropic(api_key=os.getenv("ANTHROPIC_API_KEY")) + + # Step 4: Get MCP server tools in Anthropic format + mcp_server_tools = klavis_client.mcp_server.list_tools( + server_url=response.strata_server_url, + format=ToolFormat.ANTHROPIC + ) + + # Step 5: Define user query + user_query = "Check my latest 5 emails and summarize them in a Slack message to #general channel" + + messages = [ + {"role": "user", "content": user_query} + ] + + # Step 6: Agent loop to handle tool calls + max_iterations = 10 + iteration = 0 + + while iteration < max_iterations: + iteration += 1 + + claude_response = claude_client.messages.create( + model="claude-sonnet-4-5-20250929", + max_tokens=4000, + system="You are a helpful assistant. Use the available tools to answer the user's question.", + messages=messages, + tools=mcp_server_tools.tools + ) + + messages.append({"role": "assistant", "content": claude_response.content}) + + if claude_response.stop_reason == "tool_use": + tool_results = [] + + for content_block in claude_response.content: + if content_block.type == "tool_use": + function_name = content_block.name + function_args = content_block.input + + print(f"šŸ”§ Calling: {function_name}, with args: {function_args}") + + result = klavis_client.mcp_server.call_tools( + server_url=response.strata_server_url, + tool_name=function_name, + tool_args=function_args + ) + + tool_results.append({ + "type": "tool_result", + "tool_use_id": content_block.id, + "content": str(result) + }) + + messages.append({"role": "user", "content": tool_results}) + continue + else: + # Print only the final AI response content + final_response = claude_response.content[0].text + print(f"\nšŸ¤– Final Response: {final_response}") + return final_response + + print("Max iterations reached without final response") + return None + + +if __name__ == "__main__": + asyncio.run(main()) \ No newline at end of file diff --git a/examples/strata-cookbooks/python/claude/requirements.txt b/examples/strata-cookbooks/python/claude/requirements.txt new file mode 100644 index 00000000..2bed226e --- /dev/null +++ b/examples/strata-cookbooks/python/claude/requirements.txt @@ -0,0 +1,3 @@ +klavis>=2.0.2 +python-dotenv>=1.1.0 +anthropic>=0.42.0 \ No newline at end of file diff --git a/examples/strata-cookbooks/python/crewai/.env.example b/examples/strata-cookbooks/python/crewai/.env.example new file mode 100644 index 00000000..a651d7d3 --- /dev/null +++ b/examples/strata-cookbooks/python/crewai/.env.example @@ -0,0 +1,7 @@ +# Klavis API Configuration +# Get your API key from: https://www.klavis.ai/home/mcp-servers +KLAVIS_API_KEY=your_klavis_api_key_here + +# OpenAI API Configuration +# Get your API key from: https://platform.openai.com/api-keys +OPENAI_API_KEY=your_openai_api_key_here diff --git a/examples/strata-cookbooks/python/crewai/Use_Strata_with_Crew.ipynb b/examples/strata-cookbooks/python/crewai/Use_Strata_with_Crew.ipynb new file mode 100644 index 00000000..8586d807 --- /dev/null +++ b/examples/strata-cookbooks/python/crewai/Use_Strata_with_Crew.ipynb @@ -0,0 +1,187 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Crew + Klavis AI Integration\n", + "\n", + "This tutorial demonstrates how to build AI agents using CrewAI with Klavis Strata MCP servers for enhanced functionality." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Prerequisites\n", + "\n", + "Before we begin, you'll need:\n", + "\n", + "- **OpenAI API key** - Get at [openai.com](https://openai.com/)\n", + "- **Klavis API key** - Get at [klavis.ai](https://klavis.ai/)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Install the required packages\n", + "%pip install -q klavis python-dotenv crewai crewai-tools[mcp] openai" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import webbrowser\n", + "from crewai import Agent, Task, Crew, Process\n", + "from crewai_tools import MCPServerAdapter\n", + "from klavis import Klavis\n", + "from klavis.types import McpServerName\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = \"YOUR_OPENAI_API_KEY\" # Replace with your actual OpenAI API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"YOUR_KLAVIS_API_KEY\" # Replace with your actual Klavis API key" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 1: Create Strata MCP Server\n", + "\n", + "Create Strata MCP server that combines multiple services (Gmail and Slack) for enhanced agent capabilities." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))\n", + "\n", + "# Create a Strata MCP server with Gmail and Slack integrations\n", + "response = klavis_client.mcp_server.create_strata_server(\n", + " servers=[McpServerName.GMAIL, McpServerName.SLACK], \n", + " user_id=\"1234\"\n", + ")\n", + "\n", + "print(f\"šŸ”— Strata MCP server created at: {response.strata_server_url}\")\n", + "\n", + "# Handle OAuth authorization if needed\n", + "if response.oauth_urls:\n", + " for server_name, oauth_url in response.oauth_urls.items():\n", + " webbrowser.open(oauth_url)\n", + " print(f\"šŸ” Opening OAuth authorization for {server_name}: {oauth_url}\")\n", + " input(f\"Press Enter after completing {server_name} OAuth authorization...\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 2: Create and Run CrewAI Agent with MCP Tools\n", + "\n", + "Set up the CrewAI agent with tools from the Strata MCP server, create a task, and execute it." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Configure MCP server parameters\n", + "klavis_server_params = [\n", + " {\n", + " \"url\": response.strata_server_url,\n", + " \"transport\": \"streamable-http\"\n", + " }\n", + "]\n", + "\n", + "# Configure your query\n", + "user_query = \"Check my latest 5 emails and summarize them in a Slack message to #general\" # Change this query as needed\n", + "\n", + "# Create and run the crew with MCP tools\n", + "with MCPServerAdapter(klavis_server_params) as all_mcp_tools:\n", + " print(f\"āœ… Available tools: {[tool.name for tool in all_mcp_tools]}\")\n", + "\n", + " # Create CrewAI agent with MCP tools\n", + " klavis_agent = Agent(\n", + " role=\"Klavis Query Assistant\",\n", + " goal=\"Assist the user with their query using available tools\",\n", + " backstory=\"Expert at assisting users with their queries using available tools\",\n", + " tools=all_mcp_tools,\n", + " verbose=False,\n", + " llm=\"gpt-4o\" # Using OpenAI GPT-4o model\n", + " )\n", + "\n", + " # Create a task for the agent\n", + " klavis_task = Task(\n", + " description=f\"Answer the user's query: {user_query}\",\n", + " expected_output=\"Provide a detailed response to the user's query\",\n", + " agent=klavis_agent\n", + " )\n", + "\n", + " # Create a crew with the agent and task\n", + " crew = Crew(\n", + " agents=[klavis_agent],\n", + " tasks=[klavis_task],\n", + " process=Process.sequential,\n", + " verbose=True\n", + " )\n", + "\n", + " print(\"šŸš€ Executing crew...\")\n", + " \n", + " # Execute the crew\n", + " result = crew.kickoff()\n", + " \n", + " # Print the final AI response\n", + " print(\"\\nāœ… Result:\")\n", + " print(result)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Summary\n", + "\n", + "šŸŽ‰ Congratulations! You've successfully created a CrewAI agent that can:\n", + "\n", + "1. **Read emails** using the Gmail MCP server\n", + "2. **Send Slack messages** using the Slack MCP server\n", + "3. **Coordinate multiple services** through Klavis Strata MCP integration\n", + "\n", + "This demonstrates the power of combining CrewAI's agent framework with Strata MCP server for building sophisticated AI workflows that can interact with multiple external services seamlessly." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.0" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/examples/strata-cookbooks/python/crewai/main.py b/examples/strata-cookbooks/python/crewai/main.py new file mode 100644 index 00000000..d4c777a2 --- /dev/null +++ b/examples/strata-cookbooks/python/crewai/main.py @@ -0,0 +1,62 @@ +import os +import webbrowser +from crewai import Agent, Task, Crew, Process +from crewai_tools import MCPServerAdapter +from klavis import Klavis +from klavis.types import McpServerName + +from dotenv import load_dotenv +load_dotenv() + +klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + +response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.GMAIL, McpServerName.SLACK], + user_id="1234" +) + +# Handle OAuth authorization for each services +if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + input(f"Or please open this URL to complete {server_name} OAuth authorization: {oauth_url}") + +def crew_mcp_server(mcp_server_url: str, user_query: str): + klavis_server_params = [ + { + "url": mcp_server_url, + "transport": "streamable-http" + } + ] + + with MCPServerAdapter(klavis_server_params) as all_mcp_tools: + print(f"āœ… Available tools: {[tool.name for tool in all_mcp_tools]}") + + klavis_agent = Agent( + role="Klavis Query Assistant", + goal="Assist the user with their query using available tools", + backstory="Expert at assisting users with their queries using available tools", + tools=all_mcp_tools, + verbose=False, + llm="gpt-4o" # Using OpenAI GPT-4o model + ) + + klavis_task = Task( + description=f"Answer the user's query: {user_query}", + expected_output="Provide a detailed response to the user's query", + agent=klavis_agent + ) + + crew = Crew( + agents = [klavis_agent], + tasks = [klavis_task], + process=Process.sequential, + verbose=True + ) + + result = crew.kickoff() + print(f"Crew result: {result}") + +if __name__ == "__main__": + user_query = "Check my latest 5 emails and summarize them in a Slack message to #general" # Change this query as needed + crew_mcp_server(response.strata_server_url,user_query) \ No newline at end of file diff --git a/examples/strata-cookbooks/python/crewai/requirements.txt b/examples/strata-cookbooks/python/crewai/requirements.txt new file mode 100644 index 00000000..e1ed38ad --- /dev/null +++ b/examples/strata-cookbooks/python/crewai/requirements.txt @@ -0,0 +1,4 @@ +klavis>=2.0.2 +python-dotenv>=1.1.0 +crewai>=0.130.0 +crewai-tools[mcp]>=0.48.0 \ No newline at end of file diff --git a/examples/strata-cookbooks/python/langchain/.env.example b/examples/strata-cookbooks/python/langchain/.env.example new file mode 100644 index 00000000..1b434d95 --- /dev/null +++ b/examples/strata-cookbooks/python/langchain/.env.example @@ -0,0 +1,7 @@ +# Klavis API Configuration +# Get your API key from: https://klavis.ai/home +KLAVIS_API_KEY=your_klavis_api_key_here + +# OpenAI API Configuration +# Get your API key from: https://platform.openai.com/api-keys +OPENAI_API_KEY=your_openai_api_key_here diff --git a/examples/strata-cookbooks/python/langchain/Use_Strata_with_LangChain.ipynb b/examples/strata-cookbooks/python/langchain/Use_Strata_with_LangChain.ipynb new file mode 100644 index 00000000..633f27a9 --- /dev/null +++ b/examples/strata-cookbooks/python/langchain/Use_Strata_with_LangChain.ipynb @@ -0,0 +1,180 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# LangChain + Klavis AI Integration\n", + "\n", + "This tutorial demonstrates how to build AI agents using LangChain with Klavis Strata MCP servers for enhanced functionality.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Prerequisites\n", + "\n", + "Before we begin, you'll need:\n", + "\n", + "- **OpenAI API key** - Get at [openai.com](https://openai.com/)\n", + "- **Klavis API key** - Get at [klavis.ai](https://klavis.ai/)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Install the required packages\n", + "%pip install -q klavis python-dotenv langchain-mcp-adapters langgraph langchain-openai\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import webbrowser\n", + "from klavis import Klavis\n", + "from klavis.types import McpServerName\n", + "from langchain_openai import ChatOpenAI\n", + "from langchain_mcp_adapters.client import MultiServerMCPClient\n", + "from langgraph.prebuilt import create_react_agent\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = \"YOUR_OPENAI_API_KEY\" # Replace with your actual OpenAI API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"YOUR_KLAVIS_API_KEY\" # Replace with your actual Klavis API key\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 1: Create Strata MCP Server\n", + "\n", + "Create a unified MCP server that combines multiple services (Gmail and YouTube) for enhanced agent capabilities.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))\n", + "\n", + "# Create a Strata MCP server with Gmail and YouTube integrations\n", + "response = klavis_client.mcp_server.create_strata_server(\n", + " user_id=\"demo_user\",\n", + " servers=[McpServerName.GMAIL, McpServerName.YOUTUBE],\n", + ")\n", + "\n", + "print(f\"šŸ”— Strata MCP server created at: {response.strata_server_url}\")\n", + "\n", + "# Handle OAuth authorization if needed\n", + "if response.oauth_urls:\n", + " for server_name, oauth_url in response.oauth_urls.items():\n", + " webbrowser.open(oauth_url)\n", + " print(f\"šŸ” Opening OAuth authorization for {server_name}: {oauth_url}\")\n", + " input(f\"Press Enter after completing {server_name} OAuth authorization...\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 2: Create LangChain Agent with MCP Tools\n", + "\n", + "Set up the LangChain agent with tools from the Strata MCP server.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Create MCP client\n", + "mcp_client = MultiServerMCPClient({\n", + " \"strata\": {\n", + " \"transport\": \"streamable_http\",\n", + " \"url\": response.strata_server_url,\n", + " }\n", + "})\n", + "\n", + "# Get all available tools from Strata\n", + "tools = await mcp_client.get_tools()\n", + "\n", + "# Setup LLM\n", + "llm = ChatOpenAI(model=\"gpt-4o-mini\", api_key=os.getenv(\"OPENAI_API_KEY\"))\n", + "\n", + "# Create LangChain agent with MCP tools\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=tools,\n", + " prompt=\"You are a helpful assistant that can use MCP tools.\",\n", + ")\n", + "\n", + "print(\"šŸ¤– LangChain agent created with MCP tools!\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 3: Run the Agent\n", + "\n", + "Use the agent to summarize a YouTube video and send the summary via email.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Configure your preferences\n", + "my_email = \"your-email@example.com\" # Replace with your email\n", + "youtube_video_url = \"https://youtu.be/yebNIHKAC4A?si=1Rz_ZsiVRz0YfOR7\" # Replace with your favorite video\n", + "\n", + "# Invoke the agent\n", + "result = await agent.ainvoke({\n", + " \"messages\": [{\n", + " \"role\": \"user\", \n", + " \"content\": f\"summarize this video - {youtube_video_url} and send the summary to my email {my_email}\"\n", + " }],\n", + "})\n", + "\n", + "# Print the final AI response\n", + "print(\"\\nāœ… Result:\")\n", + "print(result[\"messages\"][-1].content)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Summary\n", + "\n", + "šŸŽ‰ Congratulations! You've successfully created a LangChain agent that can:\n", + "\n", + "1. **Summarize YouTube videos** using the YouTube MCP server\n", + "2. **Send emails** using the Gmail MCP server \n", + "3. **Coordinate multiple services** through Klavis Strata MCP integration\n", + "\n", + "This demonstrates the power of combining LangChain's agent framework with Klavis MCP servers for building sophisticated AI workflows that can interact with multiple external services seamlessly.\n" + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/strata-cookbooks/python/langchain/main.py b/examples/strata-cookbooks/python/langchain/main.py new file mode 100644 index 00000000..a9e09fb3 --- /dev/null +++ b/examples/strata-cookbooks/python/langchain/main.py @@ -0,0 +1,63 @@ +import os +import asyncio +import webbrowser + +from klavis import Klavis +from klavis.types import McpServerName +from langchain_openai import ChatOpenAI +from langchain_mcp_adapters.client import MultiServerMCPClient +from langgraph.prebuilt import create_react_agent + +from dotenv import load_dotenv +load_dotenv() + + +async def main(): + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + # Step 1: Create a Strata MCP server with Gmail and Google Calendar integrations + response = klavis_client.mcp_server.create_strata_server( + user_id="demo_user", + servers=[McpServerName.GMAIL, McpServerName.YOUTUBE], + ) + + # Step 2: Handle OAuth authorization if needed + if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + input(f"Press Enter after completing {server_name} OAuth authorization...") + + # Step 3: Create LangChain Agent with MCP Tools + mcp_client = MultiServerMCPClient({ + "strata": { + "transport": "streamable_http", + "url": response.strata_server_url, + } + }) + + # Get all available tools from Strata + tools = await mcp_client.get_tools() + # Setup LLM + llm = ChatOpenAI(model="gpt-4o-mini", api_key=os.getenv("OPENAI_API_KEY")) + + # Step 4: Create LangChain agent with MCP tools + agent = create_react_agent( + model=llm, + tools=tools, + prompt=( + "You are a helpful assistant that can use MCP tools. " + ), + ) + + my_email = "golden-kpop@example.com" # TODO: Replace with your email + # Step 5: Invoke the agent + result = await agent.ainvoke({ + "messages": [{"role": "user", "content": f"summarize this video - https://youtu.be/yebNIHKAC4A?si=1Rz_ZsiVRz0YfOR7 and send the summary to my email {my_email}"}], + }) + + # Print only the final AI response content + print(result["messages"][-1].content) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/strata-cookbooks/python/langchain/requirements.txt b/examples/strata-cookbooks/python/langchain/requirements.txt new file mode 100644 index 00000000..9e025630 --- /dev/null +++ b/examples/strata-cookbooks/python/langchain/requirements.txt @@ -0,0 +1,5 @@ +klavis>=2.0.2 +python-dotenv>=1.1.0 +langchain-mcp-adapters>=0.1.9 +langgraph>=0.6.7 +langchain-openai>=0.3.33 \ No newline at end of file diff --git a/examples/strata-cookbooks/python/llamaindex/.env.example b/examples/strata-cookbooks/python/llamaindex/.env.example new file mode 100644 index 00000000..77b41f25 --- /dev/null +++ b/examples/strata-cookbooks/python/llamaindex/.env.example @@ -0,0 +1,5 @@ +# Klavis API Key - Get from https://klavis.ai/home +KLAVIS_API_KEY=your_klavis_api_key_here + +# OpenAI API Key - Get from https://platform.openai.com/api-keys +OPENAI_API_KEY=your_openai_api_key_here diff --git a/examples/strata-cookbooks/python/llamaindex/Use_Strata_with_LlamaIndex.ipynb b/examples/strata-cookbooks/python/llamaindex/Use_Strata_with_LlamaIndex.ipynb new file mode 100644 index 00000000..1f4bf4d2 --- /dev/null +++ b/examples/strata-cookbooks/python/llamaindex/Use_Strata_with_LlamaIndex.ipynb @@ -0,0 +1,238 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# LlamaIndex + Strata Integration\n", + "\n", + "This tutorial demonstrates how to build AI agents using LlamaIndex with Klavis Strata MCP servers for enhanced functionality.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Prerequisites\n", + "\n", + "Before we begin, you'll need:\n", + "\n", + "- **OpenAI API key** - Get at [openai.com](https://openai.com/)\n", + "- **Klavis API key** - Get at [klavis.ai](https://klavis.ai/)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m25.1.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m25.2\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# Install the required packages\n", + "%pip install -q klavis python-dotenv llama-index-llms-openai llama-index-tools-mcp llama-index-core\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "import os\n", + "import webbrowser\n", + "from klavis import Klavis\n", + "from klavis.types import McpServerName\n", + "from llama_index.llms.openai import OpenAI\n", + "from llama_index.core.agent.workflow import FunctionAgent\n", + "from llama_index.tools.mcp import BasicMCPClient, aget_tools_from_mcp_url\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = \"YOUR_OPENAI_API_KEY\" # Replace with your actual OpenAI API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"YOUR_KLAVIS_API_KEY\" # Replace with your actual Klavis API key" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 1: Create Strata MCP Server\n", + "\n", + "Create a unified MCP server that combines multiple services (Gmail and YouTube) for enhanced agent capabilities.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "šŸš€ Strata MCP server created successfully!\n", + "šŸ” Opening OAuth authorization for Gmail\n" + ] + } + ], + "source": [ + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))\n", + "\n", + "# Create a Strata MCP server with Gmail and YouTube integrations\n", + "response = klavis_client.mcp_server.create_strata_server(\n", + " user_id=\"1234\",\n", + " servers=[McpServerName.GMAIL, McpServerName.YOUTUBE],\n", + ")\n", + "\n", + "print(f\"šŸš€ Strata MCP server created successfully!\")\n", + "\n", + "# Handle OAuth authorization if needed\n", + "if response.oauth_urls:\n", + " for server_name, oauth_url in response.oauth_urls.items():\n", + " webbrowser.open(oauth_url)\n", + " print(f\"šŸ” Opening OAuth authorization for {server_name}\")\n", + " input(f\"Press Enter after completing {server_name} OAuth authorization...\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 2: Create LlamaIndex Agent with MCP Tools\n", + "\n", + "Set up the LlamaIndex FunctionAgent with tools from the Strata MCP server.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "šŸ¤– LlamaIndex agent created with MCP tools!\n" + ] + } + ], + "source": [ + "# Get all available tools from Strata\n", + "tools = await aget_tools_from_mcp_url(\n", + " response.strata_server_url, \n", + " client=BasicMCPClient(response.strata_server_url)\n", + ")\n", + "\n", + "# Setup LLM\n", + "llm = OpenAI(model=\"gpt-4o-mini\", api_key=os.getenv(\"OPENAI_API_KEY\"))\n", + "\n", + "# Create LlamaIndex agent with MCP tools\n", + "agent = FunctionAgent(\n", + " name=\"my_first_agent\",\n", + " description=\"Agent using MCP-based tools\",\n", + " tools=tools,\n", + " llm=llm,\n", + " system_prompt=\"You are an AI assistant that uses MCP tools.\",\n", + ")\n", + "\n", + "print(\"šŸ¤– LlamaIndex agent created with MCP tools!\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 3: Run the Agent\n", + "\n", + "Use the agent to summarize a YouTube video and send the summary via email.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "āœ… Result:\n", + "### Video Summary\n", + "The video titled **ā€œGoldenā€ Official Lyric Video | KPop Demon Hunters | Sony Animation** features the official lyric video for the song \"Golden\" from the KPop Demon Hunters soundtrack. The KPop Demon Hunters series is available on Netflix and follows KPop superstars Rumi, Mira, and Zoey, who lead double lives as demon hunters protecting their fans from supernatural threats. The video showcases vibrant animations and catchy lyrics, appealing to fans of K-Pop and animation alike.\n", + "\n", + "**Key Details:**\n", + "- **Published At:** June 23, 2025\n", + "- **Channel:** Sony Pictures Animation\n", + "- **Duration:** 3 minutes and 19 seconds\n", + "- **View Count:** 372,127,057\n", + "- **Like Count:** 2,861,033\n", + "- **Comment Count:** 87,798\n", + "- **Watch Here:** [Golden - KPop Demon Hunters](https://www.youtube.com/watch?v=yebNIHKAC4A)\n", + "\n", + "### Email Sent\n", + "The summary has been successfully sent to your email at **your-email@example.com**.\n" + ] + } + ], + "source": [ + "# Configure your preferences\n", + "my_email = \"your-email@example.com\" # Replace with your email\n", + "youtube_video_url = \"https://youtu.be/yebNIHKAC4A?si=1Rz_ZsiVRz0YfOR7\" # Replace with your favorite video\n", + "\n", + "# Invoke the agent\n", + "response = await agent.run(\n", + " f\"summarize this video - {youtube_video_url} and mail this summary to my email {my_email}\"\n", + ")\n", + "\n", + "print(\"āœ… Result:\")\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Summary\n", + "\n", + "šŸŽ‰ Congratulations! You've successfully created a LlamaIndex agent that can:\n", + "\n", + "1. **Summarize YouTube videos** using the YouTube MCP server\n", + "2. **Send emails** using the Gmail MCP server\n", + "3. **Coordinate multiple services** through Klavis Strata MCP integration\n", + "\n", + "This demonstrates the power of combining LlamaIndex's FunctionAgent with Klavis MCP servers for building sophisticated AI workflows that can interact with multiple external services seamlessly.\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/strata-cookbooks/python/llamaindex/main.py b/examples/strata-cookbooks/python/llamaindex/main.py new file mode 100644 index 00000000..04791ec0 --- /dev/null +++ b/examples/strata-cookbooks/python/llamaindex/main.py @@ -0,0 +1,60 @@ +import os +import asyncio +import webbrowser + +from klavis import Klavis +from klavis.types import McpServerName +from llama_index.llms.openai import OpenAI +from llama_index.core.agent.workflow import FunctionAgent +from llama_index.tools.mcp import BasicMCPClient +from llama_index.tools.mcp import ( + aget_tools_from_mcp_url, +) + +from dotenv import load_dotenv +load_dotenv() + +async def main(): + klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY")) + + # Step 1: Create a Strata MCP server with Gmail and YouTube integrations + response = klavis_client.mcp_server.create_strata_server( + user_id="1234", + servers=[McpServerName.GMAIL, McpServerName.YOUTUBE], + ) + + # Step 2: Handle OAuth authorization if needed + if response.oauth_urls: + for server_name, oauth_url in response.oauth_urls.items(): + webbrowser.open(oauth_url) + input(f"Press Enter after completing {server_name} OAuth authorization...") + + # Get all available tools from Strata + tools = await aget_tools_from_mcp_url( + response.strata_server_url, + client=BasicMCPClient(response.strata_server_url) + ) + + # Setup LLM + llm = OpenAI(model="gpt-4o-mini", api_key=os.getenv("OPENAI_API_KEY")) + + # Step 3: Create LlamaIndex agent with MCP tools + agent = FunctionAgent( + name="my_first_agent", + description="Agent using MCP-based tools", + tools=tools, + llm=llm, + system_prompt="You are an AI assistant that uses MCP tools.", + ) + + my_email = "golden-kpop@example.com" # TODO: Replace with your email + youtube_video_url = "https://youtu.be/yebNIHKAC4A?si=1Rz_ZsiVRz0YfOR7" # TODO: Replace with your favorite youtube video URL + # Step 4: Invoke the agent + response = await agent.run( + f"summarize this video - {youtube_video_url} and mail this summary to my email {my_email}" + ) + + print(response) + +if __name__ == "__main__": + asyncio.run(main()) \ No newline at end of file diff --git a/examples/strata-cookbooks/python/llamaindex/requirements.txt b/examples/strata-cookbooks/python/llamaindex/requirements.txt new file mode 100644 index 00000000..1edd8dff --- /dev/null +++ b/examples/strata-cookbooks/python/llamaindex/requirements.txt @@ -0,0 +1,5 @@ +klavis>=2.0.2 +python-dotenv>=1.1.1 +llama-index-llms-openai>=0.5.6 +llama-index-tools-mcp>=0.4.1 +llama-index-core>=0.14.2 diff --git a/examples/strata-cookbooks/typescript/README.md b/examples/strata-cookbooks/typescript/README.md new file mode 100644 index 00000000..947fb0c0 --- /dev/null +++ b/examples/strata-cookbooks/typescript/README.md @@ -0,0 +1,44 @@ +# Strata + LangChain TypeScript Example + +This example demonstrates how to use Klavis Strata with LangChain in TypeScript to create an AI agent that can interact with Gmail and YouTube through MCP (Model Context Protocol). + +## Setup + +1. Install dependencies: +```bash +npm install +``` + +2. Create a `.env` file with your API keys: +```bash +# Klavis API Key - Get from https://klavis.io +KLAVIS_API_KEY=your_klavis_api_key_here + +# OpenAI API Key - Get from https://platform.openai.com +OPENAI_API_KEY=your_openai_api_key_here +``` + +3. Update the email address in the code: + - Replace `golden-kpop@example.com` with your actual email address + +## Running the Example + +```bash +# Development mode (with tsx) +npm run dev + +# Or build and run +npm run build +npm start +``` + +## What This Example Does + +1. **Creates a Strata MCP Server**: Sets up a server with Gmail and YouTube integrations +2. **Handles OAuth**: Opens browser windows for OAuth authorization when needed +3. **Creates LangChain Agent**: Sets up an AI agent with access to MCP tools +4. **Executes Task**: Asks the agent to summarize a YouTube video and email the summary + +## Note + +This is a simplified TypeScript port of the Python example. The full MCP client integration for TypeScript is still evolving, so some functionality may be limited compared to the Python version. diff --git a/examples/strata-cookbooks/typescript/build_with_langchain.ts b/examples/strata-cookbooks/typescript/build_with_langchain.ts new file mode 100644 index 00000000..ed4588af --- /dev/null +++ b/examples/strata-cookbooks/typescript/build_with_langchain.ts @@ -0,0 +1,71 @@ +import { config } from 'dotenv'; +import { Klavis } from 'klavis'; +import { ChatOpenAI } from '@langchain/openai'; +import { createReactAgent } from '@langchain/langgraph/prebuilt'; +import open from 'open'; +import { createInterface } from 'readline/promises'; + +// Load environment variables +config(); + +async function main() { + const klavisClient = new Klavis({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Step 1: Create a Strata MCP server with Gmail and YouTube integrations + const response = await klavisClient.mcpServer.createStrataServer({ + userId: 'demo_user', + servers: [Klavis.McpServerName.Gmail, Klavis.McpServerName.Youtube], + }); + + // Step 2: Handle OAuth authorization if needed + if (response.oauthUrls) { + const rl = createInterface({ + input: process.stdin, + output: process.stdout, + }); + + for (const [serverName, oauthUrl] of Object.entries(response.oauthUrls)) { + await open(oauthUrl); + await rl.question(`Press Enter after completing ${serverName} OAuth authorization...`); + } + + rl.close(); + } + + // Step 3: Get tools from the Strata server + const mcpTools = await klavisClient.mcpServer.listTools({ + serverUrl: response.strataServerUrl, + format: Klavis.ToolFormat.LangChain + }); + + // Setup LLM + const llm = new ChatOpenAI({ + model: 'gpt-4o-mini', + apiKey: process.env.OPENAI_API_KEY!, + }); + + // Step 4: Create LangChain agent with MCP tools + const agent = createReactAgent({ + llm, + tools: mcpTools.tools, + systemMessage: 'You are a helpful assistant that can use MCP tools.', + }); + + const myEmail = 'golden-kpop@example.com'; // TODO: Replace with your email + + // Step 5: Invoke the agent + const result = await agent.invoke({ + messages: [{ + role: 'user' as const, + content: `summarize this video - https://youtu.be/yebNIHKAC4A?si=1Rz_ZsiVRz0YfOR7 and send the summary to my email ${myEmail}` + }], + }); + + // Print only the final AI response content + const lastMessage = result.messages[result.messages.length - 1]; + console.log(lastMessage.content); +} + +if (import.meta.url === `file://${process.argv[1]}`) { + main().catch(console.error); +} diff --git a/examples/strata-cookbooks/typescript/package.json b/examples/strata-cookbooks/typescript/package.json new file mode 100644 index 00000000..61b76926 --- /dev/null +++ b/examples/strata-cookbooks/typescript/package.json @@ -0,0 +1,24 @@ +{ + "name": "strata-langchain-typescript-example", + "version": "1.0.0", + "description": "TypeScript example using Klavis Strata with LangChain", + "main": "build_with_langchain.js", + "scripts": { + "build": "tsc", + "start": "node build_with_langchain.js", + "dev": "tsx build_with_langchain.ts" + }, + "dependencies": { + "@langchain/openai": "^0.3.33", + "@langchain/langgraph": "^0.6.7", + "klavis": "^2.0.2", + "dotenv": "^16.4.7", + "open": "^10.1.0" + }, + "devDependencies": { + "@types/node": "^22.9.3", + "tsx": "^4.19.2", + "typescript": "^5.6.3" + }, + "type": "module" +} diff --git a/examples/strata-cookbooks/typescript/tsconfig.json b/examples/strata-cookbooks/typescript/tsconfig.json new file mode 100644 index 00000000..b0cdf581 --- /dev/null +++ b/examples/strata-cookbooks/typescript/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "node", + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "declaration": true, + "outDir": "./dist", + "rootDir": "./", + "resolveJsonModule": true + }, + "include": ["*.ts"], + "exclude": ["node_modules", "dist"] +} diff --git a/examples/together-ai/Agents_KlavisAI.ipynb b/examples/together-ai/Agents_KlavisAI.ipynb new file mode 100644 index 00000000..23c366b1 --- /dev/null +++ b/examples/together-ai/Agents_KlavisAI.ipynb @@ -0,0 +1,388 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/togethercomputer/together-cookbook/blob/main/Agents/KlavisAI/Use_Klavis_with_Together.ipynb)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "# Together AI + Klavis AI Integration\n", + "\n", + "# \n", + "\n", + "In this tutorial, we'll explore how to build an AI agent that integrates Together AI's powerful LLMs with Klavis MCP Servers, enabling seamless interaction with external services and APIs.\n", + "\n", + "This integration combines:\n", + "- **Together AI**: High-performance open-source LLMs with function calling capabilities\n", + "- **Klavis AI**: MCP (Model Context Protocol) servers for connecting to external tools and services\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Prerequisites\n", + "\n", + "Before we begin, you'll need:\n", + "\n", + "- **Together AI API key** - Get yours at [together.ai](https://together.ai/)\n", + "- **Klavis AI API key** - Get yours at [klavis.ai](https://klavis.ai/)\n", + "\n", + "Make sure to keep these API keys secure and never commit them to version control!\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m25.0\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m25.1.1\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n" + ] + } + ], + "source": [ + "# Install the required packages\n", + "%pip install -qU together klavis" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "from together import Together\n", + "from klavis import Klavis\n", + "from klavis.types import McpServerName, ToolFormat\n", + "\n", + "# Set environment variables\n", + "os.environ[\"TOGETHER_API_KEY\"] = \"your-together-api-key-here\" # Replace with your actual Together API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"your-klavis-api-key-here\" # Replace with your actual Klavis API key" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Create AI Agent with MCP Integration\n", + "\n", + "Now we'll create an intelligent agent that uses Together AI's powerful LLMs with Klavis MCP servers. This agent will:\n", + "\n", + "1. **Discover Tools**: Automatically find available tools from MCP servers\n", + "2. **Function Calling**: Use Together AI's function calling capabilities\n", + "3. **Tool Execution**: Execute tools through Klavis API\n", + "4. **Smart Responses**: Generate intelligent responses based on tool results\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "class Agent:\n", + " def __init__(self, together_client, klavis_client, mcp_server_url, model=\"meta-llama/Llama-3.3-70B-Instruct-Turbo\"):\n", + " self.together = together_client\n", + " self.klavis = klavis_client\n", + " self.mcp_server_url = mcp_server_url\n", + " self.model = model\n", + " print(f\"\ud83e\udd16 Agent initialized with Together AI model: {self.model}\")\n", + " \n", + " def process_request(self, user_message):\n", + " # 1. Get available tools\n", + " mcp_tools = self.klavis.mcp_server.list_tools(\n", + " server_url=self.mcp_server_url,\n", + " format=ToolFormat.OPENAI,\n", + " )\n", + " \n", + " # 2. Call LLM with tools\n", + " messages = [\n", + " {\"role\": \"system\", \"content\": \"You are a helpful AI assistant with access to various tools.\"},\n", + " {\"role\": \"user\", \"content\": user_message}\n", + " ]\n", + " \n", + " response = self.together.chat.completions.create(\n", + " model=self.model,\n", + " messages=messages,\n", + " tools=mcp_tools.tools\n", + " )\n", + " \n", + " assistant_message = response.choices[0].message\n", + " messages.append(assistant_message)\n", + " \n", + " # 3. If LLM wants to use tools\n", + " if assistant_message.tool_calls:\n", + " \n", + " # Execute each tool call\n", + " for tool_call in assistant_message.tool_calls:\n", + " tool_name = tool_call.function.name\n", + " tool_args = json.loads(tool_call.function.arguments)\n", + " \n", + " print(f\"\ud83d\udee0\ufe0f Calling tool: {tool_name} with args: {tool_args}\")\n", + " # Call tool via Klavis SDK\n", + " tool_result = self.klavis.mcp_server.call_tools(\n", + " server_url=self.mcp_server_url,\n", + " tool_name=tool_name,\n", + " tool_args=tool_args,\n", + " )\n", + " \n", + " messages.append({\n", + " \"role\": \"tool\",\n", + " \"tool_call_id\": tool_call.id,\n", + " \"content\": str(tool_result)\n", + " })\n", + " \n", + " # 4. Get final response from LLM\n", + " final_response = self.together.chat.completions.create(\n", + " model=self.model,\n", + " messages=messages\n", + " )\n", + " return final_response.choices[0].message.content\n", + " \n", + " # If no tools needed, return the assistant message directly\n", + " return assistant_message.content\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Use Case 1: Summarize YouTube Video" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u2705 Created YouTube MCP instance\n", + "\ud83e\udd16 Agent initialized with Together AI model: meta-llama/Llama-3.3-70B-Instruct-Turbo\n", + "\ud83d\udee0\ufe0f Calling tool: get_youtube_video_transcript with args: {'url': 'https://www.youtube.com/watch?v=TG6QOa2JJJQ'}\n", + "The YouTube video \"Together AI CEO: Open Source Is the Future of AI\" features an interview with Vipul Ved Prakash, the CEO of Together AI, and Bucky Moore, a partner at Kleiner Perkins. The discussion revolves around Together AI's $102.5 million Series A fundraise, led by Kleiner Perkins, and the company's focus on open-source AI.\n", + "\n", + "Here is a comprehensive summary of the video with timestamps:\n", + "\n", + "* 0:00 - Introduction to the video and the guests\n", + "* 0:30 - Discussion of Together AI's Series A fundraise and the company's mission\n", + "* 1:45 - Vipul Ved Prakash explains the importance of open-source AI and how it can benefit the industry\n", + "* 3:10 - Bucky Moore shares his perspective on the potential of open-source AI and its applications\n", + "* 4:30 - The guests discuss the challenges and opportunities in the AI industry, including the need for more diverse and inclusive data sets\n", + "* 5:50 - Vipul Ved Prakash talks about the company's plans for the future and how it aims to make AI more accessible and affordable for everyone\n", + "* 6:40 - Conclusion and final thoughts from the guests\n", + "\n", + "Overall, the video provides insights into the future of AI and the potential of open-source AI, as well as the company's plans and goals.\n" + ] + } + ], + "source": [ + "\n", + "# Example YouTube video URL - replace with any video you'd like to analyze\n", + "YOUTUBE_VIDEO_URL = \"https://www.youtube.com/watch?v=TG6QOa2JJJQ\"\n", + "\n", + "# 1. Initialize Together AI client and Klavis client\n", + "together_client = Together(api_key=os.getenv(\"TOGETHER_API_KEY\"))\n", + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))\n", + "\n", + "# 2. Create YouTube MCP server instance\n", + "youtube_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.YOUTUBE,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "# 3. Create an agent with YouTube MCP server\n", + "agent = Agent(\n", + " together_client=together_client, \n", + " klavis_client=klavis_client, \n", + " mcp_server_url=youtube_mcp_instance.server_url,\n", + " model=\"meta-llama/Llama-3.3-70B-Instruct-Turbo\"\n", + ")\n", + "\n", + "# 4. Process the request\n", + "response = agent.process_request(\n", + " f\"Please analyze this YouTube video and provide a comprehensive summary with timestamps: {YOUTUBE_VIDEO_URL}\"\n", + ")\n", + "\n", + "print(response)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Use Case 2: Send Email\n", + "\n", + "**Note**: Gmail integration requires OAuth authentication, so you'll need to authorize the application in your browser.\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u2705 Created Gmail MCP instance\n", + "\ud83d\udd10 Opening OAuth authorization for Gmail\n", + "If you are not redirected automatically, please open this URL: https://api.klavis.ai/oauth/gmail/authorize?instance_id=d9d482b3-433a-4330-9a8b-9548c0b0a326\n" + ] + } + ], + "source": [ + "import webbrowser\n", + "\n", + "# Create Gmail MCP server instance\n", + "gmail_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.GMAIL,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "# Redirect to Gmail OAuth page for authorization\n", + "webbrowser.open(gmail_mcp_instance.oauth_url)\n", + "print(f\"\ud83d\udd10 Opening OAuth authorization for Gmail\")\n", + "print(f\"If you are not redirected automatically, please open this URL: {gmail_mcp_instance.oauth_url}\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83e\udd16 Agent initialized with Together AI model: Qwen/Qwen2.5-72B-Instruct-Turbo\n", + "\ud83d\udee0\ufe0f Calling tool: send_email with args: {'body': 'This is a test email sent using the Together AI and Klavis AI integration. The email was sent automatically by your AI agent!', 'subject': 'Greetings from Together AI + Klavis Integration', 'to': ['zihaolin@klavis.ai']}\n", + "The email has been sent successfully to zihaolin@klavis.ai with the subject 'Greetings from Together AI + Klavis Integration'. The email ID is 19776f818ce706db.\n" + ] + } + ], + "source": [ + "# Email configuration\n", + "EMAIL_RECIPIENT = \"zihaolin@klavis.ai\" # Replace with the recipient's email\n", + "EMAIL_SUBJECT = \"Greetings from Together AI + Klavis Integration\"\n", + "EMAIL_BODY = \"This is a test email sent using the Together AI and Klavis AI integration. The email was sent automatically by your AI agent!\"\n", + "\n", + "# After OAuth authorization is complete, create the Gmail agent\n", + "gmail_agent = Agent(\n", + " together_client=together_client,\n", + " klavis_client=klavis_client,\n", + " mcp_server_url=gmail_mcp_instance.server_url,\n", + " model=\"Qwen/Qwen2.5-72B-Instruct-Turbo\"\n", + ")\n", + "\n", + "# Send the email\n", + "response = gmail_agent.process_request(\n", + " f\"Please send an email to {EMAIL_RECIPIENT} with the subject '{EMAIL_SUBJECT}' and the following body: '{EMAIL_BODY}'\"\n", + ")\n", + "\n", + "print(response)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Summary\n", + "\n", + "This tutorial demonstrated how to integrate Together AI's function calling capabilities with Klavis MCP servers to create powerful AI applications. We covered two practical examples:\n", + "\n", + "**\ud83c\udfa5 YouTube Integration**: Built an AI assistant that can automatically summarize YouTube videos by extracting transcripts and providing detailed, timestamped summaries.\n", + "\n", + "**\ud83d\udce7 Gmail Integration**: Created an AI-powered email assistant that can send emails through Gmail with OAuth authentication.\n", + "\n", + "### Key Takeaways:\n", + "- **Easy Setup**: Klavis MCP servers can be created with just a few lines of code using the official SDK\n", + "- **Together AI Compatible**: All tools are formatted for seamless Together AI function calling\n", + "- **Versatile**: Support for both simple APIs (YouTube) and OAuth-authenticated services (Gmail)\n", + "- **Scalable**: The same pattern can be applied to any of the 100+ MCP servers available in Klavis\n", + "\n", + "### \ud83d\ude80 Next Steps\n", + "- **Explore More MCP Servers**: Try other available servers like Slack, Notion, CRM etc.\n", + "- **Experiment with Different Models**: Test various Together AI models for different use cases.\n", + "- **Build Complex Multi-Server Workflows**: Create sophisticated agents that combine multiple services\n", + "- **Production Deployment**: Scale these patterns for production applications\n", + "- **Custom MCP Servers**: Build your own MCP servers for proprietary systems\n", + "\n", + "### \ud83d\udd17 Useful Resources\n", + "- [Together AI Documentation](https://docs.together.ai/)\n", + "- [Klavis AI Documentation](https://www.klavis.ai/docs/)\n", + "- [MCP Protocol Specification](https://modelcontextprotocol.io/)\n", + "- [Together AI Models](https://docs.together.ai/docs/inference-models)\n", + "- [Klavis MCP Servers](https://www.klavis.ai/docs/mcp-servers)\n", + "\n", + "**Happy building with Together AI and Klavis!** \ud83d\ude80\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/examples/together-ai/Salesforce_Gmail_TogetherAI_Integration.ipynb b/examples/together-ai/Salesforce_Gmail_TogetherAI_Integration.ipynb new file mode 100644 index 00000000..3c1c2e64 --- /dev/null +++ b/examples/together-ai/Salesforce_Gmail_TogetherAI_Integration.ipynb @@ -0,0 +1,361 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/klavis-ai/klavis/blob/main/examples/together-ai/Salesforce_Gmail_TogetherAI_Integration.ipynb)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "# Together AI + Klavis Salesforce + Gmail Integration\n", + "\n", + "# \n", + "\n", + "This tutorial demonstrates how to build a powerful AI workflow using Together AI and Klavis MCP servers to:\n", + "\n", + "- **Salesforce Integration**: Automatically find CRM data like opportunities\n", + "- **Gmail Integration**: Draft and send professional follow-up emails\n", + "- **AI-Powered**: Use Together AI's LLMs for intelligent email composition\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Prerequisites\n", + "\n", + "Before we begin, you'll need:\n", + "\n", + "- **Together AI API key** - Get yours at [together.ai](https://together.ai/)\n", + "- **Klavis AI API key** - Get yours at [klavis.ai](https://klavis.ai/)" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# Install the required packages\n", + "%pip install -qU together klavis" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "import webbrowser\n", + "from together import Together\n", + "from klavis import Klavis\n", + "from klavis.types import McpServerName, ToolFormat\n", + "\n", + "# Set environment variables\n", + "os.environ[\"TOGETHER_API_KEY\"] = \"YOUR_TOGETHER_API_KEY\" # Replace with your actual Together API key\n", + "os.environ[\"KLAVIS_API_KEY\"] = \"YOUR_KLAVIS_API_KEY\" # Replace with your actual Klavis API key" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Step 1: Initialize Clients and Create MCP Server Instances\n", + "\n", + "First, let's set up our Together AI and Klavis clients, then create Salesforce and Gmail MCP server instances." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "# Initialize clients\n", + "together_client = Together(api_key=os.getenv(\"TOGETHER_API_KEY\"))\n", + "klavis_client = Klavis(api_key=os.getenv(\"KLAVIS_API_KEY\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udd10 Opening OAuth authorization for Salesforce...\n" + ] + } + ], + "source": [ + "# Create Salesforce MCP Server instance\n", + "salesforce_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.SALESFORCE,\n", + " user_id=\"1234\", \n", + ")\n", + "\n", + "# Open OAuth URL for Salesforce authorization\n", + "webbrowser.open(salesforce_mcp_instance.oauth_url)\n", + "print(f\"\ud83d\udd10 Opening OAuth authorization for Salesforce...\")" + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udd10 Opening OAuth authorization for Gmail...\n" + ] + } + ], + "source": [ + "# Create Gmail MCP Server instance\n", + "gmail_mcp_instance = klavis_client.mcp_server.create_server_instance(\n", + " server_name=McpServerName.GMAIL,\n", + " user_id=\"1234\",\n", + ")\n", + "\n", + "# Open OAuth URL for Gmail authorization\n", + "webbrowser.open(gmail_mcp_instance.oauth_url)\n", + "print(f\"\ud83d\udd10 Opening OAuth authorization for Gmail...\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Step 2: Create workflow \n", + "\n", + "Now we'll create a workflow that can work with both Salesforce and Gmail MCP servers." + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\ud83d\udee0\ufe0f Calling tool: salesforce_get_opportunities with args: {'fields': [], 'limit': 50}\n", + "\u2705 Tool salesforce_get_opportunities executed successfully\n", + "\ud83d\udee0\ufe0f Calling tool: salesforce_update_opportunity with args: {'next_step': 'Meeting schedule on 07/31/2025', 'opportunity_id': '006fJ0000080dpaQAA'}\n", + "\u2705 Tool salesforce_update_opportunity executed successfully\n", + "\ud83d\udee0\ufe0f Calling tool: gmail_draft_email with args: {'body': 'Next step: Meeting schedule on 07/31/2025', 'subject': 'Follow-up on Together AI Opportunity', 'to': ['together.ai@example.com']}\n", + "\u2705 Tool gmail_draft_email executed successfully\n", + "\u2705 Task completed in 6 iterations\n", + "A follow-up email for the next step has been drafted in your Gmail account with the subject \"Follow-up on Together AI Opportunity\" and the body \"Next step: Meeting schedule on 07/31/2025\".\n", + "\n", + "Done\n" + ] + } + ], + "source": [ + "class Workflow:\n", + " def __init__(self, together_client, klavis_client, mcp_server_urls, model=\"meta-llama/Llama-3.3-70B-Instruct-Turbo\"):\n", + " self.together = together_client\n", + " self.klavis = klavis_client\n", + " self.mcp_server_urls = mcp_server_urls\n", + " self.model = model\n", + " \n", + " def process_request(self, user_message):\n", + " # 1. Get available tools from all MCP servers and create tool-to-server mapping\n", + " all_tools = []\n", + " tool_to_server = {} # Maps tool names to their server URLs\n", + " \n", + " for server_url in self.mcp_server_urls:\n", + " mcp_tools = self.klavis.mcp_server.list_tools(\n", + " server_url=server_url,\n", + " format=ToolFormat.OPENAI,\n", + " )\n", + " all_tools.extend(mcp_tools.tools)\n", + " \n", + " for tool in mcp_tools.tools:\n", + " tool_to_server[tool[\"function\"][\"name\"]] = server_url\n", + " \n", + " # 2. Initialize conversation\n", + " messages = [\n", + " {\"role\": \"system\", \"content\": \"You are a helpful AI assistant with access to tools. Complete requested tasks step by step with the tools available. When all tasks are completed, end your response with 'Done'.\"},\n", + " {\"role\": \"user\", \"content\": user_message}\n", + " ]\n", + " \n", + " max_iterations = 10 \n", + " iteration = 0\n", + " \n", + " # 3. Keep processing until no more tool calls are needed\n", + " while iteration < max_iterations:\n", + " iteration += 1\n", + " \n", + " # Call LLM with all available tools\n", + " response = self.together.chat.completions.create(\n", + " model=self.model,\n", + " messages=messages,\n", + " tools=all_tools\n", + " )\n", + " \n", + " assistant_message = response.choices[0].message\n", + " messages.append(assistant_message)\n", + " \n", + " # If tool calls are needed\n", + " if assistant_message.tool_calls:\n", + " \n", + " # Execute tool calls\n", + " for tool_call in assistant_message.tool_calls:\n", + " tool_name = tool_call.function.name\n", + " tool_args = json.loads(tool_call.function.arguments)\n", + " \n", + " print(f\"\ud83d\udee0\ufe0f Calling tool: {tool_name} with args: {tool_args}\")\n", + " \n", + " # Find the correct server for this tool\n", + " if tool_name in tool_to_server:\n", + " server_url = tool_to_server[tool_name]\n", + " \n", + " try:\n", + " tool_result = self.klavis.mcp_server.call_tools(\n", + " server_url=server_url,\n", + " tool_name=tool_name,\n", + " tool_args=tool_args,\n", + " )\n", + " print(f\"\u2705 Tool {tool_name} executed successfully\")\n", + " except Exception as e:\n", + " tool_result = f\"Error executing tool {tool_name}: {str(e)}\"\n", + " print(f\"\u274c Tool {tool_name} failed: {str(e)}\")\n", + " else:\n", + " tool_result = f\"Error: Tool {tool_name} not found in any server\"\n", + " print(f\"\u274c Tool {tool_name} not found in any server\")\n", + " \n", + " messages.append({\n", + " \"role\": \"tool\",\n", + " \"tool_call_id\": tool_call.id,\n", + " \"content\": str(tool_result)\n", + " })\n", + " \n", + " # Continue the loop to see if LLM wants to make more tool calls\n", + " continue\n", + " \n", + " else:\n", + " # Check if the assistant said \"Done\" indicating all tasks are complete\n", + " content = assistant_message.content or \"\"\n", + " \n", + " if \"done\" in content.lower():\n", + " print(f\"\u2705 Task completed in {iteration} iterations\")\n", + " return assistant_message.content\n", + " else:\n", + " continue\n", + " \n", + " # If we hit max iterations, return the last response\n", + " print(f\"\u26a0\ufe0f Reached max iterations ({max_iterations})\")\n", + " return assistant_message.content if assistant_message.content else \"Task completed but reached iteration limit\"\n", + "\n", + "workflow = Workflow(\n", + " together_client=together_client,\n", + " klavis_client=klavis_client,\n", + " mcp_server_urls=[salesforce_mcp_instance.server_url, gmail_mcp_instance.server_url],\n", + " model=\"meta-llama/Llama-3.3-70B-Instruct-Turbo\"\n", + ")\n", + "\n", + "# Single request that uses both services\n", + "multi_response = workflow.process_request(\n", + " \"\"\"\n", + " 1. list my salesforce opportunities\n", + " 2. then update Together AI opportunity's next step to meeting schedule on 07/31/2025\n", + " 3. then in my gmail, draft a follow-up email for this next step\n", + " \"\"\"\n", + ")\n", + "\n", + "print(multi_response)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "## Summary\n", + "\n", + "This tutorial demonstrated how to create a powerful Salesforce + Gmail integration using Together AI and Klavis MCP servers.\n", + "\n", + "### \ud83d\ude80 **Key Features:**\n", + "\n", + "- **Simple Workflow Classes**: Easy-to-use Workflow classes that work with any MCP server\n", + "- **Flexible Model Selection**: Support for various Together AI models (Llama, Qwen, etc.)\n", + "- **Real-time Execution**: Direct tool execution through Klavis API\n", + "\n", + "\n", + "### \ud83d\udd27 **Next Steps:**\n", + "\n", + "- **Try More MCP Servers**: Integrate additional MCP servers like Slack, Notion, or Linear\n", + "- **Custom Workflows**: Create more sophisticated multi-step workflows\n", + "- **Error Handling**: Add robust error handling and retry logic\n", + "- **Production Deployment**: Scale for production use with proper monitoring\n", + "\n", + "**Happy building with Together AI and Klavis!** \ud83d\ude80\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/fern/README.md b/fern/README.md new file mode 100644 index 00000000..0da3108d --- /dev/null +++ b/fern/README.md @@ -0,0 +1,9 @@ +# How to Upgrade SDK + +## Steps: + +1. **Update openapi.json file** with updated API specification + +2. **Submit PR and merge** your changes to the main branch + +3. **Go to GitHub Actions**, click `Publish Python SDK` and `Publish TypeScript SDK`, specify new version, then it will automatically generate new upgraded SDK \ No newline at end of file diff --git a/fern/fern.config.json b/fern/fern.config.json new file mode 100644 index 00000000..dae49a97 --- /dev/null +++ b/fern/fern.config.json @@ -0,0 +1,4 @@ +{ + "organization": "klavis", + "version": "0.64.12" +} \ No newline at end of file diff --git a/fern/generators.yml b/fern/generators.yml new file mode 100644 index 00000000..7f9f77e3 --- /dev/null +++ b/fern/generators.yml @@ -0,0 +1,51 @@ +# yaml-language-server: $schema=https://schema.buildwithfern.dev/generators-yml.json + +api: + specs: + - openapi: ../docs/api-reference/openapi.json + overrides: overrides.yml + origin: https://api.klavis.ai/openapi.json # URL to fetch latest spec from + +groups: + python-sdk: + generators: + - name: fernapi/fern-python-sdk + version: 4.32.2 + output: + location: pypi + package-name: klavis + token: ${PYPI_TOKEN} + metadata: + keywords: ["mcp", "model-context-protocol", "router", "ai", "llm", "tools"] + documentation-link: "https://www.klavis.ai/docs/introduction" + homepage-link: "https://www.klavis.ai/" + + github: + repository: Klavis-AI/python-sdk + config: + client_class_name: Klavis + pydantic_config: + enum_type: python_enums + metadata: + package-description: "Open Source MCP Integration for AI applications" + license: MIT + smart-casing: true + ts-sdk: + generators: + - name: fernapi/fern-typescript-node-sdk + version: 1.7.0 + output: + location: npm + package-name: klavis + token: ${NPM_TOKEN} + github: + repository: Klavis-AI/typescript-sdk + config: + namespaceExport: Klavis + allowCustomFetcher: true + skipResponseValidation: true + includeApiReference: true + noSerdeLayer: true + extraDevDependencies: + msw: '2.11.2' + smart-casing: true diff --git a/fern/overrides.yml b/fern/overrides.yml new file mode 100644 index 00000000..b6c15864 --- /dev/null +++ b/fern/overrides.yml @@ -0,0 +1,225 @@ +components: + schemas: + McpServerName: + x-fern-enum: + "GitHub": + name: GITHUB + "YouTube": + name: YOUTUBE + "ClickUp": + name: CLICKUP + "DocuSign": + name: DOCUSIGN + "GitLab": + name: GITLAB + "HubSpot": + name: HUBSPOT + "LinkedIn": + name: LINKEDIN + "OneDrive": + name: ONEDRIVE + "PagerDuty": + name: PAGERDUTY + "PostHog": + name: POSTHOG + "QuickBooks": + name: QUICKBOOKS + "SendGrid": + name: SENDGRID + "WhatsApp": + name: WHATSAPP + "WordPress": + name: WORDPRESS + +paths: + "/mcp-server/call-tool": + post: + operationId: call_tools + x-fern-sdk-group-name: mcp_server + x-fern-sdk-method-name: call_tools + "/mcp-server/list-tools": + post: + operationId: list_tools + x-fern-sdk-group-name: mcp_server + x-fern-sdk-method-name: list_tools + "/mcp-server/tools/{serverName}": + get: + operationId: get_tools + x-fern-sdk-group-name: mcp_server + x-fern-sdk-method-name: get_tools + "/mcp-server/oauth-url": + post: + x-fern-sdk-group-name: mcp_server + x-fern-sdk-method-name: get_oauth_url + "/oauth/slack/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_slack + "/oauth/github/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_github + "/oauth/gitlab/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_gitlab + "/oauth/supabase/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_supabase + "/oauth/notion/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_notion + "/oauth/jira/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_jira + "/oauth/confluence/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_confluence + "/oauth/wordpress/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_wordpress + "/oauth/gmail/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_gmail + "/oauth/gdrive/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_gdrive + "/oauth/gcalendar/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_gcalendar + "/oauth/gsheets/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_gsheets + "/oauth/gdocs/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_gdocs + "/oauth/attio/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_attio + "/oauth/salesforce/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_salesforce + "/oauth/asana/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_asana + "/oauth/linear/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_linear + "/oauth/close/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_close + "/oauth/clickup/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_clickup + "/oauth/airtable/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_airtable + "/oauth/hubspot/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_hubspot + "/oauth/linkedin/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_linkedin + "/oauth/canva/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_canva + "/oauth/xero/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_xero + "/oauth/dropbox/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_dropbox + "/oauth/box/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_box + "/oauth/quickbooks/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_quickbooks + "/oauth/zendesk/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_zendesk + "/oauth/stripe/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_stripe + "/oauth/calcom/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_calcom + "/oauth/vercel/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_vercel + "/oauth/pipedrive/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_pipedrive + "/oauth/figma/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_figma + "/oauth/klaviyo/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_klaviyo + "/oauth/pagerduty/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_pagerduty + "/oauth/docusign/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_docusign + "/oauth/dialpad/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_dialpad + "/oauth/shopify/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_shopify + "/oauth/onedrive/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_onedrive + "/oauth/outlook/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_outlook + "/oauth/teams/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_teams + "/oauth/fathom/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_fathom + "/oauth/monday/authorize": + get: + x-fern-sdk-group-name: oauth + x-fern-sdk-method-name: authorize_monday \ No newline at end of file diff --git a/mcp-clients/.dockerignore b/mcp-clients/.dockerignore new file mode 100644 index 00000000..b389585e --- /dev/null +++ b/mcp-clients/.dockerignore @@ -0,0 +1,45 @@ +.venv + +# Python environment +.env +.env.* + +# OS generated files +.DS_Store +*.db + +# IDE specific files +.idea/ +.vscode/ +*.iml +*.project +.classpath +*.launch + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# Testing +.pytest_cache/ +test-results/ \ No newline at end of file diff --git a/mcp_clients/.env.example b/mcp-clients/.env.example similarity index 70% rename from mcp_clients/.env.example rename to mcp-clients/.env.example index 33e799b7..b21503fd 100644 --- a/mcp_clients/.env.example +++ b/mcp-clients/.env.example @@ -29,3 +29,11 @@ SLACK_CLIENT_SECRET=SLACK_CLIENT_SECRET SLACK_SIGNING_SECRET=SLACK_SIGNING_SECRET SLACK_BOT_USER_ID=SLACK_BOT_USER_ID SLACK_SCOPES=SLACK_SCOPES # for oauth only + +# WhatsApp API Configuration +WHATSAPP_ACCESS_TOKEN=WHATSAPP_ACCESS_TOKEN +WHATSAPP_APP_ID=WHATSAPP_APP_ID +WHATSAPP_APP_SECRET=WHATSAPP_APP_SECRET +WHATSAPP_PHONE_NUMBER_ID=WHATSAPP_PHONE_NUMBER_ID +WHATSAPP_VERIFY_TOKEN=WHATSAPP_VERIFY_TOKEN +CALLBACK_URL=CALLBACK_URL # for webhook only for localdevelopment connect to ngrok \ No newline at end of file diff --git a/mcp_clients/.gitignore b/mcp-clients/.gitignore similarity index 100% rename from mcp_clients/.gitignore rename to mcp-clients/.gitignore diff --git a/mcp-clients/.python-version b/mcp-clients/.python-version new file mode 100644 index 00000000..e4fba218 --- /dev/null +++ b/mcp-clients/.python-version @@ -0,0 +1 @@ +3.12 diff --git a/mcp-clients/Dockerfile.discord b/mcp-clients/Dockerfile.discord new file mode 100644 index 00000000..f18e149e --- /dev/null +++ b/mcp-clients/Dockerfile.discord @@ -0,0 +1,33 @@ +FROM python:3.12-slim +COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /bin/ + +# Set working directory +WORKDIR /app + +# Install required system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Install project dependencies +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + uv sync --frozen --no-install-project + +# Copy the project into the image +COPY . . + +# Sync the project +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen + +# Set environment variable to indicate we're running in Docker +ENV RUNNING_IN_DOCKER=true + +# Expose the port the app runs on +EXPOSE 8080 + +# Command to run the Discord bot +CMD ["uv", "run", "discord_bot"] \ No newline at end of file diff --git a/mcp-clients/Dockerfile.slack b/mcp-clients/Dockerfile.slack new file mode 100644 index 00000000..7b5c1f11 --- /dev/null +++ b/mcp-clients/Dockerfile.slack @@ -0,0 +1,33 @@ +FROM python:3.12-slim +COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /bin/ + +# Set working directory +WORKDIR /app + +# Install required system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Install project dependencies +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + uv sync --frozen --no-install-project + +# Copy the project into the image +COPY . . + +# Sync the project +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen + +# Set environment variable to indicate we're running in Docker +ENV RUNNING_IN_DOCKER=true + +# Expose the port the app runs on +EXPOSE 8080 + +# Command to run the Slack bot +CMD ["uv", "run", "slack_bot"] \ No newline at end of file diff --git a/mcp-clients/Dockerfile.web b/mcp-clients/Dockerfile.web new file mode 100644 index 00000000..9816860a --- /dev/null +++ b/mcp-clients/Dockerfile.web @@ -0,0 +1,33 @@ +FROM python:3.12-slim +COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /bin/ + +# Set working directory +WORKDIR /app + +# Install required system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Install project dependencies +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + uv sync --frozen --no-install-project + +# Copy the project into the image +COPY . . + +# Sync the project +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen + +# Set environment variable to indicate we're running in Docker +ENV RUNNING_IN_DOCKER=true + +# Expose the port the app runs on +EXPOSE 8080 + +# Command to run the Web bot +CMD ["uv", "run", "web_bot"] \ No newline at end of file diff --git a/mcp-clients/Dockerfile.whatsapp b/mcp-clients/Dockerfile.whatsapp new file mode 100644 index 00000000..2badcaf1 --- /dev/null +++ b/mcp-clients/Dockerfile.whatsapp @@ -0,0 +1,33 @@ +FROM python:3.12-slim +COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /bin/ + +# Set working directory +WORKDIR /app + +# Install required system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Install project dependencies +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + uv sync --frozen --no-install-project + +# Copy the project into the image +COPY . . + +# Sync the project +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen + +# Set environment variable to indicate we're running in Docker +ENV RUNNING_IN_DOCKER=true + +# Expose the port the app runs on +EXPOSE 8080 + +# Command to run the WhatsApp bot +CMD ["uv", "run", "whatsapp_bot"] \ No newline at end of file diff --git a/mcp-clients/README-Discord.md b/mcp-clients/README-Discord.md new file mode 100644 index 00000000..9b011b34 --- /dev/null +++ b/mcp-clients/README-Discord.md @@ -0,0 +1,136 @@ +# Klavis AI Discord Bot (MCP Client) - Local Development + +This document provides instructions for setting up and running the Klavis AI Discord Bot locally for development and testing purposes. This bot acts as a client for the Model Context Protocol (MCP), allowing users to interact with connected MCP servers and utilize their tools through Discord. + +**Note:** This README is intended for developers or users who want to run the bot on their own machine. For regular use, please invite the official Klavis AI bot available through [www.klavis.ai](https://www.klavis.ai). The local development version runs with `USE_PRODUCTION_DB=False`, which uses local configuration files and might have different behavior or features compared to the hosted production bot (e.g., user verification is skipped). + +## Prerequisites + +- **Python:** Version 3.12 or higher. +- **uv:** Version 0.6.14 or higher. +- **Docker:** Recommended for easiest setup and execution. ([Docker Desktop](https://www.docker.com/products/docker-desktop/)) +- **Git:** For cloning the repository. +- **Discord Bot Token:** You need to create a Discord application and bot user to get a token. See [Discord Developer Portal](https://discord.com/developers/docs/intro). + +## Setup + +1. **Clone the Repository:** + + ```bash + git clone # Replace with the actual URL + cd klavis/mcp-clients # Navigate to the root directory of the project + ``` + +2. **Environment Variables:** + + - Create a file named `.env` in the root directory of mcp-clients (`klavis/mcp-clients`). + - Copy the example below and fill in your specific values: + + ```ini + # .env example + DISCORD_TOKEN="YOUR_DISCORD_BOT_TOKEN" + WEBSITE_URL="https://www.klavis.ai/" # Or http://localhost:3000 if running web UI locally + OPENAI_API_KEY="YOUR_OPENAI_API_KEY" # Needed for the default LLM in local mode + + # Optional: Set to true to use production database (NOT recommended for local dev) + # USE_PRODUCTION_DB=False + ``` + + - Replace `"YOUR_DISCORD_BOT_TOKEN"` with the token obtained from the Discord Developer Portal. + - Replace `"YOUR_OPENAI_API_KEY"` with your OpenAI API key. Local development mode defaults to using an OpenAI model (`gpt-4o`). + - `WEBSITE_URL` is used for generating login links (though login is bypassed in local mode). Point it to the production site or your local web UI instance. + - `USE_PRODUCTION_DB` defaults to `False` if omitted, which is the correct setting for local development. + +3. **Local MCP Servers Configuration:** + + - When running locally (`USE_PRODUCTION_DB=False`), the bot reads the list of MCP server URLs to connect to from `src/mcp_clients/local_mcp_servers.json`. + - Create this file if it doesn't exist. + - Add the URLs of the MCP servers you want the local bot to connect to. + + ```json + // mcp_clients/local_mcp_servers.json example + { + "server_urls": [ + "http://localhost:8000/sse" + // Add other local or remote MCP server SSE endpoints here + ] + } + ``` + + - Replace `http://localhost:8000/sse` with the actual URL of your running MCP server(s). + +## Running the Bot + +You can run the bot using Docker (recommended) or directly with Python in a virtual environment. Make sure you are in the `klavis/mcp-clients` root directory. + +### Method 1: Docker (Recommended) + +1. **Build the Docker Image:** + + ```bash + # Make sure that docker daemon is running before executing the command + docker build -t klavis-discord-bot -f Dockerfile.discord . + ``` + + _(**Note:** The `.` at the end is important - it specifies the build context as the current directory)_ + +2. **Run the Docker Container:** + This command runs the bot using the environment variables from your `.env` file and mounts your local `local_mcp_servers.json` into the container. + + ```bash + docker run --rm --env-file .env -v ./src/mcp_clients/local_mcp_servers.json:/app/src/mcp_clients/local_mcp_servers.json klavis-discord-bot + ``` + + - `--rm`: Automatically removes the container when it exits. + - `--env-file .env`: Loads environment variables from the `.env` file in your current directory (`klavis`). + - `-v ./src/mcp_clients/local_mcp_servers.json:/app/local_mcp_servers.json`: Mounts your local JSON config into the expected path (`/app/src/mcp_clients/local_mcp_servers.json`) inside the container. + + _(**Note:** If you encounter an error at this step, it may be due to the use of quotes in the `.env` file. Try removing all the quotes and run the command again. For example: change `DISCORD_TOKEN=""` to `DISCORD_TOKEN=`.)_ + +### Method 2: Python Virtual Environment + +1. **Create and Activate Virtual Environment:** + + ```bash + # Make sure to navigate to the root directory of the project (skip if already done) + cd klavis/mcp-clients + ``` + + ```bash + # Create environment (only needs to be done once) + uv venv + + # Activate environment + # Windows (Command Prompt/PowerShell): + .venv\Scripts\activate + # macOS/Linux (bash/zsh): + source .venv/bin/activate + ``` + +2. **Install Dependencies:** + + ```bash + uv sync + ``` + +3. **Run the Bot:** + Ensure your `.env` file exists in the `klavis/mcp-clients` root and `src/mcp_clients/local_mcp_servers.json` is configured. + ```bash + uv run discord_bot + ``` + +## Usage + +1. **Invite the Bot:** Go to your Discord application in the Developer Portal, navigate to the "OAuth2" > "URL Generator" section. Select the `bot` and `application.commands` scopes. Choose necessary permissions (e.g., `Send Messages`, `Read Message History`, `Embed Links`, `Create Public Threads`). Copy the generated URL and use it to invite the bot to your test server. +2. **Interact:** + - Mention the bot in a channel: `@YourBotName your query here` + - Send a Direct Message (DM) to the bot. + - When first interacting, the bot might send a message about linking your account (using the `WEBSITE_URL`). In local mode (`USE_PRODUCTION_DB=False`), user verification is skipped, so you can proceed to interact with the bot directly. + - The bot will connect to the MCP servers listed in `local_mcp_servers.json`, process your query using the configured LLM (OpenAI by default locally), and potentially use tools from the connected servers to respond. + +## Development Notes + +- The bot uses `asyncio` for asynchronous operations. +- Logging is directed to the standard output/console. +- Key libraries include `discord.py` (Discord interaction), `mcp-client` (MCP communication), `python-dotenv` (environment variables), `openai`/`anthropic` (LLM interaction). +- Refer to `discord_bot.py`, `base_bot.py`, and `mcp_client.py` for core logic. diff --git a/mcp_clients/README-Slack.md b/mcp-clients/README-Slack.md similarity index 53% rename from mcp_clients/README-Slack.md rename to mcp-clients/README-Slack.md index a5ab0c36..0b7cac2e 100644 --- a/mcp_clients/README-Slack.md +++ b/mcp-clients/README-Slack.md @@ -5,10 +5,44 @@ This guide will help you set up your own Slack app and connect it to our applica ## Prerequisites - A Slack workspace where you have admin permissions -- Python 3.8+ installed on your machine +- Python 3.12+ installed on your machine +- uv 0.6.14+ installed on your machine - Git repository cloned locally - ngrok installed (for local development) +## Setup + +1. **Clone the Repository:** + + ```bash + git clone # Replace with the actual URL + cd klavis/mcp-clients # Navigate to the root directory of the project + ``` + +2. **Environment Variables:** + + - Create a file named `.env` in the root directory of mcp-clients (`klavis/mcp-clients`) using the `.env.example` file. + - Replace the placeholder with actual values. + +3. **Local MCP Servers Configuration:** + + - When running locally (`USE_PRODUCTION_DB=False`), the bot reads the list of MCP server URLs to connect to from `src/mcp_clients/local_mcp_servers.json`. + - Create this file if it doesn't exist. + - Add the URLs of the MCP servers you want the local bot to connect to. + + ```json + // mcp_clients/local_mcp_servers.json example + { + "server_urls": [ + "http://localhost:8000/sse" + // Add other local or remote MCP server SSE endpoints here + ] + } + ``` + + - Replace `http://localhost:8000/sse` with the actual URL of your running MCP server(s). + + ## Step 1: Environment Setup 1. Copy the example environment file to create your own: @@ -43,15 +77,36 @@ This guide will help you set up your own Slack app and connect it to our applica ## Step 4: Start Local Development Environment -1. Run your application (default port is 8080): - ```bash - python mcp_clients/slack_bot.py - ``` -2. Start ngrok to create a secure tunnel to your local server: - ```bash - ngrok http 8080 - ``` -3. Copy the HTTPS URL provided by ngrok (e.g., `https://7c2b-2601-645-8400-6db0-c0b0-639c-bb9d-5d8c.ngrok-free.app`) +1. **Create and Activate Virtual Environment:** + ```bash + # Make sure to navigate to the root directory of the project (skip if already done) + cd klavis/mcp-clients + ``` + ```bash + # Create environment (only needs to be done once) + uv venv + # Activate environment + # Windows (Command Prompt/PowerShell): + .venv\Scripts\activate + # macOS/Linux (bash/zsh): + source .venv/bin/activate + ``` +2. **Install Dependencies:** + ```bash + uv sync + ``` +3. **Run the Bot (default port is 8080):** + Ensure your `.env` file exists in the `klavis/mcp-clients` root and `src/mcp_clients/local_mcp_servers.json` is configured. + ```bash + uv run slack_bot + ``` + +4. **Start ngrok to create a secure tunnel to your local server:** + ```bash + ngrok http 8080 + ``` + +5. **Copy the HTTPS URL provided by ngrok**(e.g., `https://7c2b-2601-645-8400-6db0-c0b0-639c-bb9d-5d8c.ngrok-free.app`) ## Step 5: Configure Event Subscriptions diff --git a/mcp_clients/README-Web.md b/mcp-clients/README-Web.md similarity index 80% rename from mcp_clients/README-Web.md rename to mcp-clients/README-Web.md index 44c0b756..4327fdf4 100644 --- a/mcp_clients/README-Web.md +++ b/mcp-clients/README-Web.md @@ -1,12 +1,13 @@ # Klavis AI Web Bot (MCP Client) - Local Development -This document provides instructions for setting up and running the Klavis AI Web Bot locally. This bot acts as a FastAPI backend, serving as a client for the Multi-Compute Platform (MCP) and allowing web frontends to interact with connected MCP servers and utilize their tools. +This document provides instructions for setting up and running the Klavis AI Web Bot locally. This bot acts as a FastAPI backend, serving as a client for the Model Context Protocol (MCP) and allowing web frontends to interact with connected MCP servers and utilize their tools. **Note:** This README is intended for developers setting up the backend service. This backend is typically consumed by a separate web frontend application. The local development version can run with `USE_PRODUCTION_DB=False`, which uses local configuration files and might have different behavior compared to the hosted production service (e.g., user verification and database interactions might be different). ## Prerequisites * **Python:** Version 3.12 or higher. +* **uv:** Version 0.6.14 or higher. * **Docker:** Recommended for easiest setup and execution. ([Docker Desktop](https://www.docker.com/products/docker-desktop/)) * **Git:** For cloning the repository. * **Required Python Libraries:** `fastapi`, `uvicorn`, `python-dotenv`, `aiohttp`, `mcp-client`, `openai`, `anthropic` (and others as specified in a requirements file). @@ -16,11 +17,11 @@ This document provides instructions for setting up and running the Klavis AI Web 1. **Clone the Repository:** ```bash git clone # Replace with the actual URL - cd klavis # Navigate to the root directory of the project + cd klavis/mcp-clients # Navigate to the root directory of the project ``` 2. **Environment Variables:** - * Copy the example environment file if one exists, or create a new file named `.env` in the root directory (`klavis`). + * Copy the example environment file if one exists, or create a new file named `.env` in the root directory (`klavis/mcp-clients`). * Ensure the following variables are set: ```ini @@ -34,7 +35,7 @@ This document provides instructions for setting up and running the Klavis AI Web * `USE_PRODUCTION_DB` defaults to `False` if omitted, which is the correct setting for local development using `local_mcp_servers.json`. If set to `True`, ensure database connection variables are also present. 3. **Local MCP Servers Configuration (if `USE_PRODUCTION_DB=False`):** - * When running locally without a production database, the bot reads the list of MCP server URLs to connect to from `mcp_clients/local_mcp_servers.json`. + * When running locally without a production database, the bot reads the list of MCP server URLs to connect to from `src/mcp_clients/local_mcp_servers.json`. * Create this file if it doesn't exist. * Add the URLs of the MCP servers you want the local bot to connect to. @@ -51,14 +52,15 @@ This document provides instructions for setting up and running the Klavis AI Web ## Running the Web Service -You can run the service using Docker (recommended) or directly with Python in a virtual environment. Make sure you are in the `klavis` root directory. +You can run the service using Docker (recommended) or directly with Python in a virtual environment. Make sure you are in the `klavis/mcp-clients` root directory. ### Method 1: Docker (Recommended) 1. **Build the Docker Image:** - *(Assuming a Dockerfile exists, e.g., `mcp_clients/Dockerfile.web`)* + *(Assuming a Dockerfile exists, e.g., `Dockerfile.web`)* ```bash - docker build -t klavis-web-bot -f mcp_clients/Dockerfile.web . + # Make sure that docker daemon is running before executing the command + docker build -t klavis-web-bot -f Dockerfile.web . ``` *(Note: The `.` at the end specifies the build context. Adjust `Dockerfile.web` if the filename differs.)* @@ -66,7 +68,7 @@ You can run the service using Docker (recommended) or directly with Python in a This command runs the bot using the environment variables from your `.env` file and mounts your local `local_mcp_servers.json` if needed. ```bash # If using local_mcp_servers.json - docker run --rm -p 8080:8080 --env-file .env -v ./mcp_clients/local_mcp_servers.json:/app/local_mcp_servers.json klavis-web-bot + docker run --rm -p 8080:8080 --env-file .env -v ./src/mcp_clients/local_mcp_servers.json:/app/src/mcp_clients/local_mcp_servers.json klavis-web-bot # If using production DB (no volume mount needed for local_mcp_servers.json) # docker run --rm -p 8080:8080 --env-file .env klavis-web-bot @@ -79,29 +81,33 @@ You can run the service using Docker (recommended) or directly with Python in a ### Method 2: Python Virtual Environment 1. **Create and Activate Virtual Environment:** + ```bash - # Create environment - python -m venv venv + # Make sure to navigate to the root directory of the project (skip if already done) + cd klavis/mcp-clients + ``` + + ```bash + # Create environment (only needs to be done once) + uv venv # Activate environment - # Windows: .\venv\Scripts\activate - # macOS/Linux: source venv/bin/activate + # Windows (Command Prompt/PowerShell): + .venv\Scripts\activate + # macOS/Linux (bash/zsh): + source .venv/bin/activate ``` 2. **Install Dependencies:** - *(Assuming a requirements file exists, e.g., `mcp_clients/requirements-web.txt`)* + ```bash - pip install -r mcp_clients/requirements-web.txt + uv sync ``` - *(Adjust the requirements file path/name as needed)* -3. **Run the Service:** - Ensure your `.env` file exists in the `klavis` root and `mcp_clients/local_mcp_servers.json` is configured if required. +3. **Run the Bot:** + Ensure your `.env` file exists in the `klavis/mcp-clients` root and `src/mcp_clients/local_mcp_servers.json` is configured. ```bash - # This command runs the Uvicorn server directly - python mcp_clients/web_bot.py - # Alternatively, using uvicorn command for more options (like auto-reload) - # uvicorn mcp_clients.web_bot:app --host 0.0.0.0 --port 8080 --reload + uv run web_bot ``` ## API Endpoints diff --git a/mcp-clients/README-WhatsApp.md b/mcp-clients/README-WhatsApp.md new file mode 100644 index 00000000..6c47bd3e --- /dev/null +++ b/mcp-clients/README-WhatsApp.md @@ -0,0 +1,86 @@ +# Klavis AI WhatsApp Bot + +This is the WhatsApp integration for the Klavis AI system, allowing users to interact with Klavis AI through WhatsApp messages. + +## Features + +- Asynchronous message handling with FastAPI +- Support for WhatsApp Business Cloud API +- Streaming responses with batched delivery for better UX +- Integration with MCP servers for extended functionality +- Auto-splitting of long messages to fit WhatsApp limits +- Special handling for tool calls and running operations + +## Setup Requirements + +- WhatsApp Business API access (via Meta Developer Account) +- Python 3.12+ +- Environment variables for WhatsApp API credentials + +## Environment Variables + +``` +WHATSAPP_ACCESS_TOKEN=your_access_token +WHATSAPP_PHONE_NUMBER_ID=your_phone_number_id +WHATSAPP_VERIFY_TOKEN=your_custom_verification_token +WHATSAPP_APP_ID=your_app_id +WHATSAPP_APP_SECRET=your_app_secret +CALLBACK_URL=your_webhook_url +PORT=8080 +``` + +## WhatsApp API Setup + +1. Create a Meta Developer account at [developers.facebook.com](https://developers.facebook.com/) +2. Create a new "Business" type app and add the WhatsApp product +3. Configure webhook subscriptions for `messages` and `message_reactions` +4. Set your verification token (must match `WHATSAPP_VERIFY_TOKEN`) +5. Get your test phone number or link a WhatsApp Business account + +## Local Development + +```bash +# Install dependencies +uv sync + +# Run the bot +python -m src.mcp_clients.whatsapp_bot +``` + +## Using Docker + +```bash +# Build the Docker image +docker build -t klavis-whatsapp-bot -f Dockerfile.whatsapp . + +# Run with environment variables +docker run -p 8080:8080 --env-file .env klavis-whatsapp-bot +``` + +## Exposing Local Development for Testing + +Use ngrok or similar tools to expose your local server: + +```bash +ngrok http 8080 +``` + +Update the webhook URL in the Meta Developer Dashboard and the `CALLBACK_URL` environment variable with the ngrok URL. + +## Important WhatsApp API Considerations + +- Must use template messages for initial outbound messages +- 24-hour messaging window restriction after user contact +- Message templates require approval by Meta +- Rate limits apply (see [WhatsApp Business API documentation](https://developers.facebook.com/docs/whatsapp/cloud-api/)) + +## Troubleshooting + +- Check webhook verification configuration +- Verify access token hasn't expired +- Use template messages when initiating conversation +- Check bot logs for detailed error information + +## Architecture + +The WhatsApp bot is built on FastAPI and uses the pywa_async library for WhatsApp interactions. It connects to MCP servers to provide AI functionality and tool capabilities to users through WhatsApp. \ No newline at end of file diff --git a/mcp-clients/README.md b/mcp-clients/README.md new file mode 100644 index 00000000..81812098 --- /dev/null +++ b/mcp-clients/README.md @@ -0,0 +1,11 @@ +# MCP Clients + +This directory contains the client implementations for interacting with the Model Context Protocol (MCP). + +## Available Clients + +* **Discord:** For instructions on setting up and running the Discord bot locally, + see [README-Discord.md](./README-Discord.md). +* **Slack:** For instructions on setting up and running the Slack bot locally, see [README-Slack.md](./README-Slack.md). +* **Web:** For instructions on setting up and running the Web bot locally, see [README-Web.md](./README-Web.md). +* **WhatsApp:** For instructions on setting up and running the WhatsApp bot, see [README-WhatsApp.md](./README-WhatsApp.md). \ No newline at end of file diff --git a/mcp-clients/pyproject.toml b/mcp-clients/pyproject.toml new file mode 100644 index 00000000..03acbc5c --- /dev/null +++ b/mcp-clients/pyproject.toml @@ -0,0 +1,32 @@ +[project] +name = "mcp-clients" +version = "0.1.0" +description = "Klavis AI: Open source MCP clients and Servers" +readme = "README.md" +requires-python = ">=3.12" +dependencies = [ + "discord.py>=2.3.0", + "anthropic>=0.54.0", + "python-dotenv>=1.1.0", + "anyio>=4.9.0", + "trio>=0.22.0", + "mcp>=1.9.3", + "supabase>=2.15.0", + "fastapi>=0.115.12", + "uvicorn>=0.34.0", + "slack-bolt>=1.23.0", + "httpx>=0.28.1", + "openai>=1.72.0", + "markitdown[all]", + "pywa>=0.7.0", + "uvloop>=0.17.0", +] + +[tool.uv] +package = true + +[project.scripts] +discord_bot = "mcp_clients.discord_bot:main" +slack_bot = "mcp_clients.slack_bot:main" +web_bot = "mcp_clients.web_bot:main" +whatsapp_bot = "mcp_clients.whatsapp_bot:main" \ No newline at end of file diff --git a/mcp_clients/__init__.py b/mcp-clients/src/mcp_clients/__init__.py similarity index 100% rename from mcp_clients/__init__.py rename to mcp-clients/src/mcp_clients/__init__.py diff --git a/mcp_clients/base_bot.py b/mcp-clients/src/mcp_clients/base_bot.py similarity index 92% rename from mcp_clients/base_bot.py rename to mcp-clients/src/mcp_clients/base_bot.py index c732d77e..de02e013 100644 --- a/mcp_clients/base_bot.py +++ b/mcp-clients/src/mcp_clients/base_bot.py @@ -1,16 +1,18 @@ +import json import logging +import os from abc import ABC, abstractmethod -from typing import Dict, Any, Optional, List, Tuple -from mcp_client import MCPClient +from typing import Dict, Any, Optional, List + import time -import json -import os -from config import USE_PRODUCTION_DB + +from mcp_clients.config import USE_PRODUCTION_DB # Define empty result structures for when database is not used -from llms import Conversation, ChatMessage -if USE_PRODUCTION_DB: - from database import database +from mcp_clients.llms.base import Conversation, ChatMessage +from mcp_clients.mcp_client import MCPClient +if USE_PRODUCTION_DB: + from mcp_clients.database import database logger = logging.getLogger("base_bot") @@ -22,12 +24,12 @@ class BotContext(ABC): """ def __init__( - self, - platform_name: str, - user_id: str, - mcp_client_id: str = None, - llm_id: str = None, - user_message: Any = None, + self, + platform_name: str, + user_id: str, + mcp_client_id: str = None, + llm_id: str = None, + user_message: Any = None, ): """ Initialize the bot context. @@ -42,7 +44,7 @@ def __init__( self.user_message = user_message self.mcp_client_id = mcp_client_id self.llm_id = llm_id - + @abstractmethod def get_channel_id(self) -> Optional[str]: """ @@ -53,7 +55,7 @@ def get_channel_id(self) -> Optional[str]: String representation of the channel ID, or None if not applicable """ pass - + @abstractmethod def get_thread_id(self) -> Optional[str]: """ @@ -101,7 +103,7 @@ async def verify_user(self, context: BotContext) -> Dict[str, Any]: Verify the user. """ start_time = time.time() - + if USE_PRODUCTION_DB: verification_result = await database.get_user_connection_information( context.platform_name, context.user_id @@ -123,13 +125,13 @@ async def verify_user(self, context: BotContext) -> Dict[str, Any]: logger.info("Database operations skipped: verify_user") return verification_result - + async def check_and_update_usage_limit(self, context: BotContext) -> bool: """ Check if the user has reached the usage limit and update it if so. """ start_time = time.time() - + if USE_PRODUCTION_DB: result = await database.check_and_update_usage_limit(context.mcp_client_id) logger.info(f"Check and update usage limit took {time.time() - start_time} seconds to complete") @@ -143,13 +145,13 @@ async def check_and_update_usage_limit(self, context: BotContext) -> bool: # Skip database operation, return dummy result logger.info("Database operations skipped: check_and_update_usage_limit") return True - + async def get_server_urls(self, context: BotContext) -> List[str]: """ Get the server URLs for the user. """ start_time = time.time() - + if USE_PRODUCTION_DB: mcp_servers = await database.get_connected_mcp_servers( context.mcp_client_id @@ -181,7 +183,7 @@ async def get_server_urls(self, context: BotContext) -> List[str]: return data.get("server_urls", []) async def initialize_mcp_client( - self, context: BotContext, server_urls: List[str] = None + self, context: BotContext, server_urls: List[str] = None ) -> Any: """ Initialize an MCP client for the user. @@ -251,7 +253,7 @@ async def initialize_mcp_client( return mcp_client async def store_new_messages( - self, conversation_id: str, messages: List[ChatMessage] + self, conversation_id: str, messages: List[ChatMessage] ) -> None: """ Store new messages in the conversation. @@ -265,7 +267,7 @@ async def store_new_messages( @abstractmethod async def process_query_with_streaming( - self, mcp_client: MCPClient, messages_history: List[ChatMessage], context: BotContext + self, mcp_client: MCPClient, messages_history: List[ChatMessage], context: BotContext ) -> Any: """ Process a query with streaming in a platform-specific way. @@ -288,7 +290,8 @@ async def run(self): pass @abstractmethod - async def get_messages_history(self, conversation: Conversation, context: BotContext, limit: int = 6) -> List[ChatMessage]: + async def get_messages_history(self, conversation: Conversation, context: BotContext, limit: int = 6) -> List[ + ChatMessage]: """ Get the messages history. """ diff --git a/mcp_clients/config.py b/mcp-clients/src/mcp_clients/config.py similarity index 94% rename from mcp_clients/config.py rename to mcp-clients/src/mcp_clients/config.py index 98c137f7..abdf994b 100644 --- a/mcp_clients/config.py +++ b/mcp-clients/src/mcp_clients/config.py @@ -3,10 +3,11 @@ """ import os + from dotenv import load_dotenv load_dotenv() # Flag to control whether database operations are performed # Set to False to skip all database operations -USE_PRODUCTION_DB = os.getenv("USE_PRODUCTION_DB", "False").lower() == "true" \ No newline at end of file +USE_PRODUCTION_DB = os.getenv("USE_PRODUCTION_DB", "False").lower() == "true" diff --git a/mcp-clients/src/mcp_clients/database b/mcp-clients/src/mcp_clients/database new file mode 160000 index 00000000..c6a6f86f --- /dev/null +++ b/mcp-clients/src/mcp_clients/database @@ -0,0 +1 @@ +Subproject commit c6a6f86f128ddafaee2b0948a93db87eca7127d1 diff --git a/mcp_clients/discord_bot.py b/mcp-clients/src/mcp_clients/discord_bot.py similarity index 94% rename from mcp_clients/discord_bot.py rename to mcp-clients/src/mcp_clients/discord_bot.py index 9a5d4ee1..104d4e72 100644 --- a/mcp_clients/discord_bot.py +++ b/mcp-clients/src/mcp_clients/discord_bot.py @@ -1,23 +1,20 @@ -import os -import logging import asyncio -from typing import Dict, Any, List, Optional +import logging +import os +from typing import Any, List, Optional from urllib.parse import quote -from mcp_client import MCPClient - import discord -from discord.ext import commands from discord import Embed, Color, DMChannel, Thread +from discord.ext import commands from discord.ui import View - from dotenv import load_dotenv -from base_bot import BaseBot, BotContext -from llms import ChatMessage, MessageRole, TextContent, FileContent, Conversation -from config import USE_PRODUCTION_DB +from mcp_clients.base_bot import BaseBot, BotContext +from mcp_clients.config import USE_PRODUCTION_DB +from mcp_clients.llms.base import ChatMessage, MessageRole, TextContent, FileContent, Conversation +from mcp_clients.mcp_client import MCPClient -# Load environment variables load_dotenv() # Configure logging @@ -48,13 +45,13 @@ class DiscordBotContext(BotContext): """ def __init__( - self, - platform_name: str, - user_id: str, - is_dm: bool, - channel, - user_message, - thread, + self, + platform_name: str, + user_id: str, + is_dm: bool, + channel, + user_message, + thread, ): """ Initialize the Discord bot context. @@ -93,9 +90,9 @@ def get_thread_id(self) -> Optional[str]: String representation of the thread ID, or None if not applicable """ if ( - self.user_message - and hasattr(self.user_message, "thread") - and self.user_message.thread + self.user_message + and hasattr(self.user_message, "thread") + and self.user_message.thread ): return str(self.user_message.thread.id) return None @@ -196,7 +193,7 @@ async def on_message(self, message): context, f"You need to link your {context.platform_name} account with our website to use the AI assistant features. I have sent you a Direct Message to link your account.", ) - + try: await message.author.send( f"You need to link your {context.platform_name} account with our website to use the AI assistant features using the login button below.", @@ -210,7 +207,7 @@ async def on_message(self, message): ) except Exception as e: logger.error(f"Error sending DM: {e}", exc_info=True) - + return context.mcp_client_id = verification_result["mcp_client_id"] @@ -225,7 +222,7 @@ async def on_message(self, message): view=self.create_config_view(), ) # Don't return here to allow processing with 0 MCP server - + usage_under_limit = await self.check_and_update_usage_limit(context) if not usage_under_limit: await self.send_message( @@ -301,11 +298,11 @@ async def slash_welcome(interaction: discord.Interaction): await interaction.response.send_message(embed=welcome_embed, view=view) async def send_message( - self, - context: DiscordBotContext, - message: str, - view=None, - embed=None, + self, + context: DiscordBotContext, + message: str, + view=None, + embed=None, ) -> Any: """ Send a message on Discord. @@ -325,10 +322,10 @@ async def send_message( return await context.channel.send(message, view=view, embed=embed) async def process_query_with_streaming( - self, - mcp_client: MCPClient, - messages_history: List[ChatMessage], - context: DiscordBotContext, + self, + mcp_client: MCPClient, + messages_history: List[ChatMessage], + context: DiscordBotContext, ) -> Any: """ Process a query with streaming in Discord-specific way. @@ -350,8 +347,8 @@ async def process_query_with_streaming( asyncio.timeout(200.0), ): async for chunk in mcp_client.process_query_stream( - messages_history, - self.store_new_messages if USE_PRODUCTION_DB else None, + messages_history, + self.store_new_messages if USE_PRODUCTION_DB else None, ): buffer += chunk # Check if the chunk contains a message split token @@ -416,9 +413,9 @@ async def process_query_with_streaming( return await self.send_message(context, f"Error processing query: {str(e)}") def create_login_view( - self, - platform_username: str, - platform_user_id: str, + self, + platform_username: str, + platform_user_id: str, ) -> View: """ Create a login config view with buttons @@ -537,7 +534,7 @@ async def send_welcome_message(self, channel, user): await channel.send(embed=welcome_embed, view=view) async def get_messages_history( - self, conversation: Conversation, context: BotContext, limit: int = 6 + self, conversation: Conversation, context: BotContext, limit: int = 6 ) -> List[ChatMessage]: """ Get the previous messages for the conversation. @@ -593,7 +590,7 @@ async def get_messages_history( return chat_messages def create_tool_call_embed( - self, special_content: str, title: str = "šŸ“² MCP Server Call" + self, special_content: str, title: str = "šŸ“² MCP Server Call" ) -> Embed: """ Create an embed for tool calls with special handling for long-running tools. @@ -627,6 +624,7 @@ def create_tool_call_embed( def run(self): """Run the Discord bot""" + print(DISCORD_TOKEN) self.client.run(DISCORD_TOKEN) diff --git a/mcp-clients/src/mcp_clients/llms/__init__.py b/mcp-clients/src/mcp_clients/llms/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/mcp_clients/llms/anthropic.py b/mcp-clients/src/mcp_clients/llms/anthropic.py similarity index 78% rename from mcp_clients/llms/anthropic.py rename to mcp-clients/src/mcp_clients/llms/anthropic.py index 8a9ac99e..f17f689a 100644 --- a/mcp_clients/llms/anthropic.py +++ b/mcp-clients/src/mcp_clients/llms/anthropic.py @@ -1,8 +1,11 @@ -import logging import json +import logging +import uuid from typing import Optional, Dict, Any, AsyncGenerator, List + from anthropic import AsyncAnthropic -from llms import ( + +from mcp_clients.llms.base import ( BaseLLM, LLMMessageFormat, ChatMessage, @@ -12,7 +15,6 @@ ToolResultContent, ContentType, ) -import uuid # Configure logging logger = logging.getLogger("anthropic_client") @@ -30,9 +32,10 @@ def __init__(self, model: Optional[str] = None): self.anthropic_client = AsyncAnthropic() self.model = model or "claude-3-5-sonnet-20241022" # Default model self.max_tokens = self.config.max_tokens # Default max tokens + self._extracted_system_message = "" # Store system messages extracted from chat history async def create_streaming_generator( - self, messages: list, available_tools: list, resources: list = None + self, messages: list, available_tools: list, resources: list = None ) -> AsyncGenerator[str, None]: """ Create a streaming generator with the given messages and tools @@ -49,13 +52,26 @@ async def create_streaming_generator( # Prepare request parameters system_message = "" + # Add extracted system message from chat history + if self._extracted_system_message: + system_message = self._extracted_system_message + elif hasattr(Anthropic, '_last_extracted_system_message') and Anthropic._last_extracted_system_message: + # Use the system message extracted by from_chat_messages + system_message = Anthropic._last_extracted_system_message + # Store it in the instance for future use + self._extracted_system_message = Anthropic._last_extracted_system_message + # Add system message if operating in a specific platform context if self.platform and self.platform_config.get("system_message"): - system_message = self.platform_config["system_message"] + if system_message: + system_message += "\n\n" + self.platform_config["system_message"] + else: + system_message = self.platform_config["system_message"] + if resources: system_message += ( - "\n\nThere are some resources that may be relevant to the conversation. You can use them to answer the user's question.\n\n" - + "\n\n".join(resources) + "\n\nThere are some resources that may be relevant to the conversation. You can use them to answer the user's question.\n\n" + + "\n\n".join(resources) ) # Create request parameters @@ -114,8 +130,8 @@ async def create_streaming_generator( current_text += delta.text yield delta.text elif ( - delta.type == "input_json_delta" - and block_index in current_tool_calls + delta.type == "input_json_delta" + and block_index in current_tool_calls ): # Accumulate JSON for tool input current_tool_calls[block_index][ @@ -170,7 +186,7 @@ async def create_streaming_generator( yield f"\n[Error in streaming process: {str(e)}]\n" async def non_streaming_response( - self, messages: list, available_tools: Optional[list] = None + self, messages: list, available_tools: Optional[list] = None ) -> Dict[str, Any]: """ Get a non-streaming response from the LLM @@ -185,14 +201,26 @@ async def non_streaming_response( logger.info("Sending non-streaming request to Claude") # Prepare request parameters - system_message = None + system_message = "" request_messages = ( messages.copy() ) # Make a copy to avoid modifying the original + # Add extracted system message from chat history + if self._extracted_system_message: + system_message = self._extracted_system_message + elif hasattr(Anthropic, '_last_extracted_system_message') and Anthropic._last_extracted_system_message: + # Use the system message extracted by from_chat_messages + system_message = Anthropic._last_extracted_system_message + # Store it in the instance for future use + self._extracted_system_message = Anthropic._last_extracted_system_message + # Add system message if operating in a specific platform context if self.platform and self.platform_config.get("system_message"): - system_message = self.platform_config["system_message"] + if system_message: + system_message += "\n\n" + self.platform_config["system_message"] + else: + system_message = self.platform_config["system_message"] # Create request parameters kwargs = { @@ -223,7 +251,7 @@ def to_chat_messages(raw_messages: List[Dict[str, Any]]) -> List[ChatMessage]: Convert a list of Anthropic format messages to ChatMessage format Args: - anthropic_messages: List of messages in Anthropic format + raw_messages: List of messages in Anthropic format Returns: List of ChatMessages @@ -306,15 +334,39 @@ def from_chat_messages(chat_messages: List[ChatMessage]) -> List[Dict[str, Any]] List of messages in Anthropic format """ anthropic_messages = [] - - # TODO: Handle system messages + + # Extract and combine system messages + system_message_parts = [] + non_system_messages = [] + for chat_message in chat_messages: + if chat_message.role == MessageRole.SYSTEM: + # Extract text content from system messages + for content in chat_message.content: + if content.type == ContentType.TEXT and content.text: + system_message_parts.append(content.text) + else: + # Keep non-system messages for processing + non_system_messages.append(chat_message) + + # Store the combined system message in a class variable that can be accessed by instance methods + # Note: Using a simple approach since this is a static method + combined_system_message = "\n\n".join(system_message_parts) if system_message_parts else "" + + # Store in a module-level variable that instances can access + # This is a temporary solution since we can't modify the method signature + if hasattr(Anthropic, '_last_extracted_system_message'): + Anthropic._last_extracted_system_message = combined_system_message + else: + setattr(Anthropic, '_last_extracted_system_message', combined_system_message) + + # Process non-system messages + for chat_message in non_system_messages: # Map MessageRole to Anthropic role # Note: Anthropic doesn't support system or tool as message roles role_map = { MessageRole.USER: "user", MessageRole.ASSISTANT: "assistant", - MessageRole.SYSTEM: "user", # Map system to user with special handling MessageRole.TOOL: "user", # Map tool to user with special handling } diff --git a/mcp_clients/llms/base.py b/mcp-clients/src/mcp_clients/llms/base.py similarity index 97% rename from mcp_clients/llms/base.py rename to mcp-clients/src/mcp_clients/llms/base.py index 538a7cc1..b94fb617 100644 --- a/mcp_clients/llms/base.py +++ b/mcp-clients/src/mcp_clients/llms/base.py @@ -1,13 +1,12 @@ -from typing import Any, Dict, List, Literal, Optional, Union, AsyncGenerator -from pydantic import BaseModel, Field, model_validator -from abc import ABC, abstractmethod import logging +import uuid +from abc import ABC, abstractmethod +from datetime import datetime from enum import Enum - +from typing import AsyncGenerator from typing import List, Optional, Union, Dict, Any, Literal -from datetime import datetime -import uuid +from pydantic import BaseModel, Field, model_validator logger = logging.getLogger("llm_client") @@ -255,9 +254,9 @@ class BaseLLM(ABC): """ def __init__( - self, - config: Optional[BaseLLMConfig] = None, - message_format: Optional[LLMMessageFormat] = LLMMessageFormat.OPENAI, + self, + config: Optional[BaseLLMConfig] = None, + message_format: Optional[LLMMessageFormat] = LLMMessageFormat.OPENAI, ): """ Initialize the base LLM client @@ -304,7 +303,7 @@ def get_message_format(self) -> str: @abstractmethod async def create_streaming_generator( - self, messages: list, available_tools: list, resources: list = None + self, messages: list, available_tools: list, resources: list = None ) -> AsyncGenerator[str, None]: """ Create a streaming generator with the given messages and tools @@ -320,7 +319,7 @@ async def create_streaming_generator( @abstractmethod async def non_streaming_response( - self, messages: list, available_tools: Optional[list] = None + self, messages: list, available_tools: Optional[list] = None ) -> Dict[str, Any]: """ Get a non-streaming response from the LLM diff --git a/mcp_clients/llms/configs.py b/mcp-clients/src/mcp_clients/llms/configs.py similarity index 100% rename from mcp_clients/llms/configs.py rename to mcp-clients/src/mcp_clients/llms/configs.py diff --git a/mcp_clients/llms/openai.py b/mcp-clients/src/mcp_clients/llms/openai.py similarity index 93% rename from mcp_clients/llms/openai.py rename to mcp-clients/src/mcp_clients/llms/openai.py index 3be76814..e79422b4 100644 --- a/mcp_clients/llms/openai.py +++ b/mcp-clients/src/mcp_clients/llms/openai.py @@ -1,8 +1,11 @@ import logging import uuid -from typing import Optional, Dict, Any, AsyncGenerator, List, Union +from typing import Optional, Dict, Any, AsyncGenerator, List + +import time from openai import AsyncOpenAI -from llms import ( + +from mcp_clients.llms.base import ( BaseLLM, ChatMessage, MessageRole, @@ -11,7 +14,7 @@ ToolResultContent, ContentType, ) -import time + # Configure logging logger = logging.getLogger("openai_client") @@ -23,16 +26,18 @@ class OpenAI(BaseLLM): """ def __init__( - self, - api_key: Optional[str] = None, - model: Optional[str] = None, - base_url: Optional[str] = None, + self, + api_key: Optional[str] = None, + model: Optional[str] = None, + base_url: Optional[str] = None, ): """ Initialize the OpenAI client Args: - config: Optional configuration for OpenAI + api_key: Optional configuration for OpenAI + model: Optional model of GenAI + base_url: Optional url to connect to given api_key GenAI """ super().__init__() self.openai_client = AsyncOpenAI(api_key=api_key, base_url=base_url) @@ -40,7 +45,7 @@ def __init__( self.max_tokens = self.config.max_tokens async def create_streaming_generator( - self, messages: list, available_tools: list, resources: list = None + self, messages: list, available_tools: list, resources: list = None ) -> AsyncGenerator[str, None]: """ Create an OpenAI streaming generator with the given messages and tools. @@ -64,7 +69,8 @@ async def create_streaming_generator( system_message_content = self.platform_config.get("system_message") if resources: - system_message_content += "\n\nThere are some resources that may be relevant to the conversation. You can use them to answer the user's question.\n\n" + "\n\n".join(resources) + system_message_content += "\n\nThere are some resources that may be relevant to the conversation. You can use them to answer the user's question.\n\n" + "\n\n".join( + resources) if system_message_content and not messages[0].get("role") == "system": request_params["messages"].insert( 0, {"role": "system", "content": system_message_content} @@ -128,8 +134,8 @@ async def create_streaming_generator( } # Accumulate arguments string if ( - tool_call_chunk.function - and tool_call_chunk.function.arguments + tool_call_chunk.function + and tool_call_chunk.function.arguments ): accumulated_tool_calls[index]["function"][ "arguments" @@ -164,7 +170,7 @@ async def create_streaming_generator( yield f"\n[Error in OpenAI streaming process: {str(e)}]\n" async def non_streaming_response( - self, messages: list, available_tools: Optional[list] = None + self, messages: list, available_tools: Optional[list] = None ) -> Dict[str, Any]: """ Get a non-streaming response from OpenAI LLM. @@ -222,7 +228,7 @@ def to_chat_messages(raw_messages: List[Dict[str, Any]]) -> List[ChatMessage]: Convert a list of OpenAI format messages to ChatMessage format Args: - openai_messages: List of messages in OpenAI format + raw_messages: List of messages in OpenAI format Returns: List of ChatMessages @@ -246,9 +252,8 @@ def to_chat_messages(raw_messages: List[Dict[str, Any]]) -> List[ChatMessage]: parsed_arguments = {} if isinstance(arguments, str): + import json try: - import json - parsed_arguments = json.loads(arguments) except json.JSONDecodeError as e: logger.error( @@ -269,9 +274,9 @@ def to_chat_messages(raw_messages: List[Dict[str, Any]]) -> List[ChatMessage]: # Handle tool results if ( - role == MessageRole.TOOL - and openai_message.get("content") - and openai_message.get("tool_call_id") + role == MessageRole.TOOL + and openai_message.get("content") + and openai_message.get("tool_call_id") ): content_list.append( ToolResultContent( @@ -313,7 +318,7 @@ def from_chat_messages(chat_messages: List[ChatMessage]) -> List[Dict[str, Any]] ), } ) - + if chat_message.role == MessageRole.USER: str_content = "" for content in chat_message.content: diff --git a/mcp_clients/local_mcp_servers.json b/mcp-clients/src/mcp_clients/local_mcp_servers.json similarity index 100% rename from mcp_clients/local_mcp_servers.json rename to mcp-clients/src/mcp_clients/local_mcp_servers.json diff --git a/mcp_clients/mcp_client.py b/mcp-clients/src/mcp_clients/mcp_client.py similarity index 94% rename from mcp_clients/mcp_client.py rename to mcp-clients/src/mcp_clients/mcp_client.py index d405229c..29437d45 100644 --- a/mcp_clients/mcp_client.py +++ b/mcp-clients/src/mcp_clients/mcp_client.py @@ -1,30 +1,22 @@ -import logging -from urllib.parse import urlparse -from typing import Optional, Dict, Any, AsyncGenerator, List, Tuple -from contextlib import AsyncExitStack -import uuid -import os import asyncio -import time import base64 +import logging +import os +import uuid +from contextlib import AsyncExitStack +from typing import Any, AsyncGenerator, Dict, List, Tuple, Optional +from urllib.parse import urlparse + import markitdown -from mcp.client.session import ClientSession +import time +from dotenv import load_dotenv +from mcp import ClientSession, StdioServerParameters, stdio_client from mcp.client.sse import sse_client -from mcp.client.stdio import StdioServerParameters, stdio_client - -from llms import ( - BaseLLM, - LLMMessageFormat, - ChatMessage, - MessageRole, - ToolResultContent, - ContentType, - Conversation, -) -from dotenv import load_dotenv -from llms.anthropic import Anthropic -from llms.openai import OpenAI +from mcp_clients.llms.anthropic import Anthropic +from mcp_clients.llms.base import Conversation, BaseLLM, LLMMessageFormat, ContentType, MessageRole, \ + ToolResultContent, ChatMessage +from mcp_clients.llms.openai import OpenAI # Load environment variables load_dotenv() @@ -48,11 +40,11 @@ class MCPClient: """ def __init__( - self, - platform_name: str = None, - api_name: str = None, - provider: str = None, - conversation: Conversation = None, + self, + platform_name: str = None, + api_name: str = None, + provider: str = None, + conversation: Conversation = None, ): """ Initialize the MCP client. @@ -79,9 +71,9 @@ def __init__( # Set platform context if provided if platform_name: self.llm_client.set_platform(platform_name) - + def _initialize_llm_client( - self, api_name: str = None, provider: str = None + self, api_name: str = None, provider: str = None ) -> BaseLLM: """ Initialize LLM client based on provided provider and set model based on api_name @@ -126,12 +118,12 @@ def _redact_sensitive_args(self, tool_args: Dict[str, Any]) -> Dict[str, Any]: """ if not isinstance(tool_args, dict): return tool_args - + redacted_args = {} for key, value in tool_args.items(): # Check if this parameter name matches any sensitive patterns is_sensitive = any(param in key.lower() for param in SENSITIVE_PARAMS) - + if is_sensitive and isinstance(value, str): # Redact the value, preserving first and last two characters if long enough if len(value) > 8: @@ -143,11 +135,11 @@ def _redact_sensitive_args(self, tool_args: Dict[str, Any]) -> Dict[str, Any]: redacted_args[key] = self._redact_sensitive_args(value) else: redacted_args[key] = value - + return redacted_args async def connect_to_server( - self, url: str, args: list = None, env: Dict[str, str] = None + self, url: str, args: list = None, env: Dict[str, str] = None ) -> Tuple[str, Optional[str]]: """ Connect to an MCP server via URL @@ -206,7 +198,7 @@ async def connect_to_server( if server_id in self.exit_stacks: await self.exit_stacks[server_id].aclose() del self.exit_stacks[server_id] - logger.error(f"Error connecting to MCP server: {e}") + logger.exception(f"Error connecting to MCP server: {e}") return f"Error connecting to MCP server: {str(e)}", None async def refresh_tool_cache(self, server_id: str) -> List[Dict[str, Any]]: @@ -252,7 +244,7 @@ async def refresh_tool_cache(self, server_id: str) -> List[Dict[str, Any]]: return [] async def get_tools_for_server( - self, server_id: str, use_cache: bool = True + self, server_id: str, use_cache: bool = True ) -> List[Dict[str, Any]]: """ Get the current tools available from a specific server @@ -337,7 +329,7 @@ async def find_server_for_tool(self, tool_name: str) -> Optional[str]: return None async def _process_tool_call( - self, tool_name: str, arguments: dict[str, Any] | None = None + self, tool_name: str, arguments: dict[str, Any] | None = None ) -> AsyncGenerator[str, None]: """ Process a tool call and return the result @@ -369,7 +361,7 @@ async def _process_tool_call( message_split_token = self.llm_client.get_message_split_token() # Create a task for the actual tool call tool_call_task = asyncio.create_task(session.call_tool(tool_name, arguments)) - + # Wait for the tool call to complete with progress updates every 30 seconds start_time = asyncio.get_event_loop().time() while not tool_call_task.done(): @@ -383,7 +375,7 @@ async def _process_tool_call( elapsed = int(asyncio.get_event_loop().time() - start_time) logger.info(f"Tool call {tool_name} still running after {elapsed} seconds") yield f"\n[Tool {tool_name} still running... ({elapsed} seconds elapsed)]{message_split_token}\n" - + # If we reach here, the task completed yield str(await tool_call_task) except Exception as e: @@ -405,9 +397,9 @@ def is_final_response(self, messages: List[ChatMessage]) -> bool: ) async def process_query_stream( - self, - messages_history: List[ChatMessage], - store_new_messages_callback: callable = None, + self, + messages_history: List[ChatMessage], + store_new_messages_callback: callable = None, ) -> AsyncGenerator[str, None]: """ Process a query using Claude and available tools with streaming responses @@ -441,7 +433,7 @@ async def process_query_stream( start_time = time.time() # Use the LLMClient to create the streaming generator async for chunk_text in self.llm_client.create_streaming_generator( - provider_messages, available_tools, text_resource_contents + provider_messages, available_tools, text_resource_contents ): yield chunk_text logger.info(f"LLM took {time.time() - start_time} seconds to complete") @@ -475,7 +467,7 @@ async def process_query_stream( # Redact sensitive information from tool arguments before displaying display_args = self._redact_sensitive_args(tool_args) if tool_args else None yield f"\n[Calling tool {tool_name} with arguments {str(display_args)[:100]}...]{message_split_token}\n" - + # Process tool call with progress updates result_text = "" start_time = time.time() @@ -488,7 +480,7 @@ async def process_query_stream( result_text = update logger.info(f"Tool {tool_name} took {time.time() - start_time} seconds to complete") logger.info(f"Tool result: {result_text}") - + # Add tool result to next user message tool_result_content.append( ToolResultContent( @@ -505,7 +497,7 @@ async def process_query_stream( if store_new_messages_callback: start_time = time.time() await store_new_messages_callback( - self.conversation.id, chat_messages[messages_history_len - 1 :] + self.conversation.id, chat_messages[messages_history_len - 1:] ) logger.info(f"Store new messages took {time.time() - start_time} seconds to complete") @@ -530,7 +522,7 @@ def get_message_split_token(self) -> str: """ return self.llm_client.get_message_split_token() - async def list_all_resources(self) -> List[Dict[str, Any]]: + async def list_all_resources(self) -> List[Dict[str, Any]]: """ List all available resources from all connected servers @@ -542,7 +534,7 @@ async def list_all_resources(self) -> List[Dict[str, Any]]: for server_id in self.sessions.keys(): session = self.sessions[server_id] response = await session.list_resources() - + # Handle direct resources if hasattr(response, "resources"): resources.extend([ @@ -579,16 +571,16 @@ async def read_resource(self, server_id: str, uri: str) -> List[Dict[str, Any]]: try: session = self.sessions[server_id] response = await session.read_resource(uri) - + # Process the resource contents contents = [] - + for content in response.contents: content_data = { "uri": content.uri, "mimeType": content.mimeType if hasattr(content, "mimeType") else None, } - + # Handle text or binary content if hasattr(content, "text") and content.text is not None: content_data["text"] = content.text @@ -602,12 +594,12 @@ async def read_resource(self, server_id: str, uri: str) -> List[Dict[str, Any]]: content_data["text"] = markdown_text else: pass - + contents.append(content_data) - + logger.info(f"Successfully read resource {uri} from server {server_id}") return contents - + except Exception as e: logger.error(f"Error reading resource {uri} from server {server_id}: {str(e)}") return [] diff --git a/mcp_clients/slack/__init__.py b/mcp-clients/src/mcp_clients/slack/__init__.py similarity index 92% rename from mcp_clients/slack/__init__.py rename to mcp-clients/src/mcp_clients/slack/__init__.py index 02c7cb95..0b3eb6ea 100644 --- a/mcp_clients/slack/__init__.py +++ b/mcp-clients/src/mcp_clients/slack/__init__.py @@ -1,3 +1,3 @@ """ Integration package for Slack MCP client. -""" \ No newline at end of file +""" diff --git a/mcp_clients/slack/context.py b/mcp-clients/src/mcp_clients/slack/context.py similarity index 79% rename from mcp_clients/slack/context.py rename to mcp-clients/src/mcp_clients/slack/context.py index 2e6dac68..86bc3db9 100644 --- a/mcp_clients/slack/context.py +++ b/mcp-clients/src/mcp_clients/slack/context.py @@ -2,7 +2,9 @@ Slack bot context module. """ from typing import Dict, Any, Optional -from base_bot import BotContext + +from mcp_clients.base_bot import BotContext + class SlackBotContext(BotContext): """ @@ -11,14 +13,14 @@ class SlackBotContext(BotContext): """ def __init__( - self, - platform_name: str, - user_id: str, - channel_id: str, - thread_ts: Optional[str] = None, - initial_message: Optional[Dict[str, Any]] = None, - user_message: Optional[Dict[str, Any]] = None, - bot_token: Optional[str] = None, + self, + platform_name: str, + user_id: str, + channel_id: str, + thread_ts: Optional[str] = None, + initial_message: Optional[Dict[str, Any]] = None, + user_message: Optional[Dict[str, Any]] = None, + bot_token: Optional[str] = None, ): """ Initialize the Slack bot context. @@ -37,7 +39,7 @@ def __init__( self.thread_ts = thread_ts self.initial_message = initial_message self.bot_token = bot_token - + def get_channel_id(self) -> Optional[str]: """ Get the channel ID for the current context. @@ -46,7 +48,7 @@ def get_channel_id(self) -> Optional[str]: String representation of the channel ID, or None if not applicable """ return str(self.channel_id) - + def get_thread_id(self) -> Optional[str]: """ Get the thread ID for the current context, if applicable. @@ -54,4 +56,4 @@ def get_thread_id(self) -> Optional[str]: Returns: String representation of the thread ID, or None if not applicable """ - return self.thread_ts \ No newline at end of file + return self.thread_ts diff --git a/mcp_clients/slack/event_routes.py b/mcp-clients/src/mcp_clients/slack/event_routes.py similarity index 93% rename from mcp_clients/slack/event_routes.py rename to mcp-clients/src/mcp_clients/slack/event_routes.py index 98da9c14..820392b4 100644 --- a/mcp_clients/slack/event_routes.py +++ b/mcp-clients/src/mcp_clients/slack/event_routes.py @@ -1,13 +1,14 @@ import logging +from typing import Callable + import time from fastapi import Request, Response from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler from starlette.middleware.base import BaseHTTPMiddleware -from typing import Callable, Dict, Any, Optional -import json logger = logging.getLogger("slack_bot") + def setup_http_routes(router, slack_handler: AsyncSlackRequestHandler): """ Set up HTTP routes for Slack app integration @@ -17,15 +18,15 @@ def setup_http_routes(router, slack_handler: AsyncSlackRequestHandler): slack_handler: Slack request handler for processing events """ route_logger = logging.getLogger("slack_bot.routes") - + @router.post("/slack/events") async def slack_events_post(request: Request): """Endpoint for Slack events (POST method)""" route_logger.info("--- Received Slack event (POST)") - + # Get the JSON body body = await request.json() - + # Handle URL verification challenge if body.get("type") == "url_verification": route_logger.info("Handling Slack URL verification challenge") @@ -33,45 +34,46 @@ async def slack_events_post(request: Request): if challenge: route_logger.info(f"Responding with challenge: {challenge}") return {"challenge": challenge} - + # Filter out bot messages early if body.get("event") and body.get("event").get("bot_id"): route_logger.debug("--- Ignoring bot message") return {"ok": True} # Return 200 OK to acknowledge - + response = await slack_handler.handle(request) return response - + @router.post("/slack/interactive") async def slack_interactive(request: Request): """Endpoint for Slack interactive components (buttons, modals, etc.)""" route_logger.info("---Received Slack interactive component request") return await slack_handler.handle(request) - + + class LoggingMiddleware(BaseHTTPMiddleware): """Middleware for logging requests and responses""" - + async def dispatch(self, request: Request, call_next: Callable) -> Response: start_time = time.time() - + # Get request details path = request.url.path method = request.method - + # Log request logger.info(f"Request: {method} {path}") - + # Process the request try: response = await call_next(request) - + # Calculate processing time process_time = time.time() - start_time - + # Log response status_code = response.status_code logger.info(f"Response: {method} {path} - Status: {status_code} - Time: {process_time:.3f}s") - + return response except Exception as e: # Log error diff --git a/mcp_clients/slack/interactive_handlers.py b/mcp-clients/src/mcp_clients/slack/interactive_handlers.py similarity index 94% rename from mcp_clients/slack/interactive_handlers.py rename to mcp-clients/src/mcp_clients/slack/interactive_handlers.py index 8bfbeb44..02bc16b3 100644 --- a/mcp_clients/slack/interactive_handlers.py +++ b/mcp-clients/src/mcp_clients/slack/interactive_handlers.py @@ -5,17 +5,18 @@ logger = logging.getLogger("slack_bot") + def register_interactive_handlers(app): """Register all interactive component handlers with the Slack app.""" - + @app.action("login_button") async def handle_login_button(ack, body, logger): """Handle login button click actions""" await ack() logger.info(f"Login button clicked: {body.get('user', {}).get('id')}") - + @app.action("connect_mcp_server_button") async def handle_connect_mcp_server_button(ack, body, logger): """Handle connect to MCP server button click actions""" await ack() - logger.info(f"Connect to MCP server button clicked: {body.get('user', {}).get('id')}") \ No newline at end of file + logger.info(f"Connect to MCP server button clicked: {body.get('user', {}).get('id')}") diff --git a/mcp_clients/slack/message_handlers.py b/mcp-clients/src/mcp_clients/slack/message_handlers.py similarity index 92% rename from mcp_clients/slack/message_handlers.py rename to mcp-clients/src/mcp_clients/slack/message_handlers.py index 27cc5d10..4c86baf4 100644 --- a/mcp_clients/slack/message_handlers.py +++ b/mcp-clients/src/mcp_clients/slack/message_handlers.py @@ -1,25 +1,27 @@ """ Handlers for Slack message events, including app mentions and direct messages. """ -import logging import asyncio -from typing import Dict, Any, List import logging +from typing import Dict, Any, List from urllib.parse import quote -from slack.settings import settings -from slack.context import SlackBotContext -from config import USE_PRODUCTION_DB + +from mcp_clients.config import USE_PRODUCTION_DB +from mcp_clients.slack.context import SlackBotContext +from mcp_clients.slack.settings import settings logger = logging.getLogger("slack_bot") if USE_PRODUCTION_DB: - from database.database import get_mcp_client_id_by_slack_info + from mcp_clients.database.database import get_mcp_client_id_by_slack_info else: # Define dummy function when database is not used async def get_mcp_client_id_by_slack_info(team_id, user_id): - logger.info(f"Database operations skipped: get_mcp_client_id_by_slack_info for team_id={team_id}, user_id={user_id}") + logger.info( + f"Database operations skipped: get_mcp_client_id_by_slack_info for team_id={team_id}, user_id={user_id}") return None + def create_login_blocks(platform_username: str, platform_user_id: str, team_id: str = None) -> List[Dict[str, Any]]: """ Create Slack blocks with a login button similar to Discord format @@ -34,15 +36,15 @@ def create_login_blocks(platform_username: str, platform_user_id: str, team_id: """ # Construct login URL with query parameters login_url = f"{settings.WEBSITE_URL}/auth/sign-in" - + # URL encode the parameters encoded_username = quote(platform_username) login_url = f"{login_url}?platform=slack&external_user_id={platform_user_id}&external_username={encoded_username}" - + # Add team_id if available if team_id: login_url = f"{login_url}&external_id={team_id}" - + # Create blocks with login button return [ { @@ -69,7 +71,8 @@ def create_login_blocks(platform_username: str, platform_user_id: str, team_id: ] } ] - + + def create_connect_mcp_server_blocks(mcp_client_id: str = None) -> List[Dict[str, Any]]: """ Create Slack blocks with warning and a button to connect to MCP servers @@ -82,11 +85,11 @@ def create_connect_mcp_server_blocks(mcp_client_id: str = None) -> List[Dict[str """ # Construct configure URL configure_url = f"{settings.WEBSITE_URL}/home" - + # Add mcp_client_id as path parameter if available if mcp_client_id: configure_url = f"{configure_url}/mcp-client/{mcp_client_id}" - + # Create blocks with warning and connect button return [ { @@ -125,12 +128,13 @@ def create_connect_mcp_server_blocks(mcp_client_id: str = None) -> List[Dict[str }, ] + def register_message_handlers(app, bot): """Register all message-related event handlers with the Slack app.""" # Add user lock to prevent concurrent message processing if not hasattr(bot, 'user_locks'): bot.user_locks = {} - + async def add_loading_reaction(client, channel_id, message_ts): """Add loading reaction to a message""" try: @@ -141,7 +145,7 @@ async def add_loading_reaction(client, channel_id, message_ts): ) except Exception as e: logger.error(f"Error adding loading reaction: {e}") - + async def remove_loading_reaction(client, channel_id, message_ts): """Remove loading reaction from a message""" try: @@ -152,30 +156,30 @@ async def remove_loading_reaction(client, channel_id, message_ts): ) except Exception as e: logger.error(f"Error removing loading reaction: {e}") - + @app.event("app_mention") async def handle_app_mention(event, context, client, ack, logger): """Handle mentions of the bot in public channels""" await ack() - + # Retrieve the bot token from the context provided by the authorize method bot_token = context.get("bot_token") if not bot_token: logger.error("No bot token available in context") return - + user_id = event.get("user") text = event.get("text", "") channel_id = event.get("channel") thread_ts = event.get("thread_ts", event.get("ts")) message_ts = event.get("ts") - + await add_loading_reaction(client, channel_id, message_ts) - + # Check if user has a message currently being processed if user_id not in bot.user_locks: bot.user_locks[user_id] = asyncio.Lock() - + # Try to acquire the lock, but don't block if it's already locked if bot.user_locks[user_id].locked(): try: @@ -188,7 +192,7 @@ async def handle_app_mention(event, context, client, ack, logger): except Exception as e: logger.error(f"Error sending busy message: {e}") return - + # Extract clean text from mention (remove the bot mention) clean_text = text # Find the bot mention pattern and remove it @@ -196,9 +200,9 @@ async def handle_app_mention(event, context, client, ack, logger): parts = text.split(">", 1) if len(parts) > 1: clean_text = parts[1].strip() - + logger.info(f"--- Received app mention from user {user_id} in channel {channel_id}: {clean_text}") - + # Process the message with the lock acquired try: async with asyncio.timeout(200), bot.user_locks[user_id]: @@ -226,10 +230,10 @@ async def handle_app_mention(event, context, client, ack, logger): except Exception as e: logger.error(f"Error getting user info: {e}") username = user_id - + # Get team ID from context team_id = context.get("team_id") - + try: # Send login blocks as a DM to the user await client.chat_postMessage( @@ -255,14 +259,15 @@ async def handle_app_mention(event, context, client, ack, logger): if not server_urls: team_id = context.get("team_id") mcp_client_id = await get_mcp_client_id_by_slack_info(team_id, user_id) - + await client.chat_postMessage( channel=channel_id, thread_ts=thread_ts, - blocks=create_connect_mcp_server_blocks(mcp_client_id or verification_result.get("mcp_client_id")) + blocks=create_connect_mcp_server_blocks( + mcp_client_id or verification_result.get("mcp_client_id")) ) # Don't return here to allow processing with 0 MCP server - + if clean_text: usage_under_limit = await bot.check_and_update_usage_limit(slack_context) if not usage_under_limit: @@ -272,7 +277,7 @@ async def handle_app_mention(event, context, client, ack, logger): ) await remove_loading_reaction(client, channel_id, message_ts) return - + try: mcp_client = await bot.initialize_mcp_client( context=slack_context, server_urls=server_urls @@ -288,11 +293,13 @@ async def handle_app_mention(event, context, client, ack, logger): await bot.send_message(slack_context, f"Error processing query: {str(e)}") finally: await remove_loading_reaction(client, channel_id, message_ts) - logger.info(f" --- Completed processing query from user {user_id} in channel {channel_id}: {clean_text}") + logger.info( + f" --- Completed processing query from user {user_id} in channel {channel_id}: {clean_text}") await mcp_client.cleanup() except asyncio.TimeoutError: - logger.warning(f"Processing timed out for user {user_id} in channel {channel_id} after 200 seconds. Lock released.") + logger.warning( + f"Processing timed out for user {user_id} in channel {channel_id} after 200 seconds. Lock released.") try: await client.chat_postMessage( channel=channel_id, @@ -305,32 +312,33 @@ async def handle_app_mention(event, context, client, ack, logger): # Ensure reaction is removed regardless of timeout or success/failure await remove_loading_reaction(client, channel_id, message_ts) # Lock is released automatically by async with when exiting the block or due to timeout/exception - + @app.event("message") async def handle_direct_message(event, context, client, ack, logger): """Handle direct messages to the bot""" # Only process DMs, not messages in channels if event.get("channel_type") == "im" and not event.get("bot_id"): await ack() - + # Retrieve the bot token from the context provided by the authorize method bot_token = context.get("bot_token") if not bot_token: logger.error("No bot token available in context") return - + user_id = event.get("user") text = event.get("text", "") channel_id = event.get("channel") - thread_ts = event.get("thread_ts", event.get("ts")) # since we reply DM in thread, we set thread_ts to event ts + thread_ts = event.get("thread_ts", + event.get("ts")) # since we reply DM in thread, we set thread_ts to event ts message_ts = event.get("ts") - + await add_loading_reaction(client, channel_id, message_ts) - + # Check if user has a message currently being processed if user_id not in bot.user_locks: bot.user_locks[user_id] = asyncio.Lock() - + # Try to acquire the lock, but don't block if it's already locked if bot.user_locks[user_id].locked(): try: @@ -342,7 +350,7 @@ async def handle_direct_message(event, context, client, ack, logger): except Exception as e: logger.error(f"Error sending busy message: {e}") return - + logger.info(f"---Received DM from user {user_id} in channel {channel_id}: {text}") try: # Process the message with the lock acquired @@ -371,10 +379,10 @@ async def handle_direct_message(event, context, client, ack, logger): except Exception as e: logger.error(f"Error getting user info: {e}") username = user_id - + # Get team ID from context team_id = context.get("team_id") - + # Use create_login_blocks function to generate blocks with proper login URL await client.chat_postMessage( channel=channel_id, @@ -391,14 +399,15 @@ async def handle_direct_message(event, context, client, ack, logger): if not server_urls: team_id = context.get("team_id") mcp_client_id = await get_mcp_client_id_by_slack_info(team_id, user_id) - + await client.chat_postMessage( channel=channel_id, thread_ts=thread_ts, - blocks=create_connect_mcp_server_blocks(mcp_client_id or verification_result.get("mcp_client_id")) + blocks=create_connect_mcp_server_blocks( + mcp_client_id or verification_result.get("mcp_client_id")) ) # Don't return here to allow processing with 0 MCP server - + # Process the message content if text: usage_under_limit = await bot.check_and_update_usage_limit(slack_context) @@ -409,7 +418,7 @@ async def handle_direct_message(event, context, client, ack, logger): ) await remove_loading_reaction(client, channel_id, message_ts) return - + try: mcp_client = await bot.initialize_mcp_client( context=slack_context, server_urls=server_urls @@ -425,10 +434,12 @@ async def handle_direct_message(event, context, client, ack, logger): await bot.send_message(slack_context, f"Error processing query: {str(e)}") finally: await remove_loading_reaction(client, channel_id, message_ts) - logger.info(f" --- Completed processing query from user {user_id} in channel {channel_id}: {text}") + logger.info( + f" --- Completed processing query from user {user_id} in channel {channel_id}: {text}") await mcp_client.cleanup() except asyncio.TimeoutError: - logger.warning(f"Processing timed out for user {user_id} in channel {channel_id} after 200 seconds. Lock released.") + logger.warning( + f"Processing timed out for user {user_id} in channel {channel_id} after 200 seconds. Lock released.") try: await client.chat_postMessage( channel=channel_id, diff --git a/mcp_clients/slack/oauth_routes.py b/mcp-clients/src/mcp_clients/slack/oauth_routes.py similarity index 93% rename from mcp_clients/slack/oauth_routes.py rename to mcp-clients/src/mcp_clients/slack/oauth_routes.py index f5e47d35..697ac86f 100644 --- a/mcp_clients/slack/oauth_routes.py +++ b/mcp-clients/src/mcp_clients/slack/oauth_routes.py @@ -1,23 +1,25 @@ -import logging -import json import base64 +import json +import logging import urllib.parse from typing import Optional, Dict, Any -from slack.settings import settings -from slack_sdk.web.async_client import AsyncWebClient -from config import USE_PRODUCTION_DB import httpx from fastapi import APIRouter, HTTPException, status from fastapi.responses import RedirectResponse from pydantic import BaseModel +from slack_sdk.web.async_client import AsyncWebClient + +from mcp_clients.config import USE_PRODUCTION_DB +from mcp_clients.slack.settings import settings if USE_PRODUCTION_DB: - from database.database import create_mcp_client, insert_slack_client_auth + from mcp_clients.database.database import create_mcp_client, insert_slack_client_auth # Configure logging logger = logging.getLogger("slack_oauth") + class SlackOAuthResponse(BaseModel): """Model for Slack OAuth API response""" ok: bool @@ -31,6 +33,7 @@ class SlackOAuthResponse(BaseModel): bot_user_id: Optional[str] = None error: Optional[str] = None + def setup_oauth_routes(router: APIRouter): """ Set up OAuth routes for Slack app installation @@ -38,37 +41,37 @@ def setup_oauth_routes(router: APIRouter): Args: router: FastAPI router to add routes to """ - + @router.get("/slack/oauth/install") async def install_slack_app(account_id: str = None): """ Redirect to Slack OAuth authorization page - """ + """ if not settings.SLACK_CLIENT_ID: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="SLACK_CLIENT_ID environment variable is not set" ) - + query_params = { "client_id": settings.SLACK_CLIENT_ID, "scope": settings.SLACK_SCOPES, "redirect_uri": settings.SLACK_REDIRECT_URI } - + state = None - # Create state parameter with account_id if provided, this will be passed when user add Klavis powered Slack MCP Client + # Create state parameter with account_id if provided, this will be passed when user add Klavis powered Slack MCP Client if account_id: logger.info(f"Installing Klavis powered Slack MCP Client with account_id: {account_id}") state_data = {"account_id": account_id} state = base64.urlsafe_b64encode(json.dumps(state_data).encode()).decode() if state: query_params["state"] = state - + encoded_params = urllib.parse.urlencode(query_params) - + auth_url = f"https://slack.com/oauth/v2/authorize?{encoded_params}" - + logger.info(f"Redirecting to Slack OAuth: {auth_url}") return RedirectResponse(auth_url) @@ -85,11 +88,11 @@ async def oauth_callback(code: str = None, error: str = None, state: str = None) if error: logger.error(f"OAuth error: {error}") return {"error": error} - + if not code: logger.error("No authorization code received") return {"error": "No authorization code received"} - + # Extract account_id from state parameter if it exists account_id = None if state: @@ -100,31 +103,31 @@ async def oauth_callback(code: str = None, error: str = None, state: str = None) logger.info(f"OAuth callback received with account_id: {account_id} (from state)") except Exception as e: logger.error(f"Error decoding state parameter: {str(e)}") - + oauth_response = await exchange_code_for_token(code) - + if not oauth_response.ok: logger.error(f"OAuth token exchange failed: {oauth_response.error}") return {"error": oauth_response.error or "OAuth token exchange failed"} - + logger.info(f"Successfully authenticated for team: {oauth_response.team}") - + if not oauth_response.team or not oauth_response.team.get("id"): logger.error("Team ID not found in OAuth response") return {"error": "Team ID not found in OAuth response"} - + # Get user profile to obtain username user_id = oauth_response.authed_user.get("id") if oauth_response.authed_user else None username = None - + if user_id: username, _ = await get_user_profile( - user_id=user_id, + user_id=user_id, bot_token=oauth_response.access_token ) - + mcp_client_result = {"mcp_client": None} - + if account_id and USE_PRODUCTION_DB: # Create Slack MCP Client with auth metadata mcp_client_result = await create_mcp_client( @@ -135,30 +138,30 @@ async def oauth_callback(code: str = None, error: str = None, state: str = None) external_username=username, platform="slack", ) - + if mcp_client_result["error"]: error_msg = f"Failed to create MCP client for account {account_id}" logger.error(error_msg) return {"error": error_msg} - + # Save auth metadata to `slack_client_auth` table auth_result = await insert_slack_client_auth( team_id=oauth_response.team.get("id"), auth_metadata=oauth_response.model_dump() ) - + if auth_result["error"]: logger.error(f"Failed to save Slack auth metadata: {auth_result['error']}") # do not return, continue - + # Send welcome message try: user_id = oauth_response.authed_user.get("id") if oauth_response.authed_user else None if user_id: success, error_message = await send_welcome_message( - user_id=user_id, + user_id=user_id, username=username, - bot_token=oauth_response.access_token, + bot_token=oauth_response.access_token, mcp_client_result=mcp_client_result ) if not success: @@ -167,7 +170,7 @@ async def oauth_callback(code: str = None, error: str = None, state: str = None) logger.warning("Could not identify installing user to send welcome message") except Exception as e: logger.error(f"Error sending welcome message: {str(e)}") - + if account_id and mcp_client_result["mcp_client"] and mcp_client_result["mcp_client"].get("id"): # Redirect to client-specific page redirect_url = f"{settings.WEBSITE_URL}/home/mcp-client/{mcp_client_result['mcp_client'].get('id')}" @@ -178,7 +181,7 @@ async def oauth_callback(code: str = None, error: str = None, state: str = None) async def exchange_code_for_token(code: str) -> SlackOAuthResponse: token_url = "https://slack.com/api/oauth.v2.access" - + # Prepare data for token exchange using urllib.parse data = { "client_id": settings.SLACK_CLIENT_ID, @@ -186,14 +189,14 @@ async def exchange_code_for_token(code: str) -> SlackOAuthResponse: "code": code, "redirect_uri": settings.SLACK_REDIRECT_URI } - + async with httpx.AsyncClient() as client: response = await client.post( token_url, data=data, headers={"Content-Type": "application/x-www-form-urlencoded"} ) - + data = response.json() return SlackOAuthResponse(**data) @@ -204,14 +207,14 @@ async def get_user_profile(user_id: str, bot_token: str) -> tuple[str, dict]: headers={"Authorization": f"Bearer {bot_token}"}, params={"user": user_id} ) - + user_data = user_response.json() username = "slack_user" # Default username - + if user_data.get("ok") and user_data.get("user"): user_info = user_data.get("user", {}) profile = user_info.get("profile", {}) - + # Try to get the best available username option if profile.get("display_name"): username = profile.get("display_name") @@ -219,12 +222,13 @@ async def get_user_profile(user_id: str, bot_token: str) -> tuple[str, dict]: username = profile.get("real_name") elif user_info.get("name"): username = user_info.get("name") - + # Remove spaces for URL safety username = username.replace(" ", "_") - + return username, user_data + async def send_welcome_message(user_id: str, username: str, bot_token: str, mcp_client_result: Dict[str, Any]): """ Send a welcome message to a Slack user. @@ -242,14 +246,14 @@ async def send_welcome_message(user_id: str, username: str, bot_token: str, mcp_ else: dashboard_url = f"{settings.WEBSITE_URL}/home/mcp-client/{client_id}" welcome_blocks = create_welcome_blocks(dashboard_url) - + client = AsyncWebClient(token=bot_token) response = await client.chat_postMessage( channel=user_id, text="Thanks for installing our app! šŸŽ‰", blocks=welcome_blocks ) - + if response["ok"]: return True, None else: @@ -257,7 +261,8 @@ async def send_welcome_message(user_id: str, username: str, bot_token: str, mcp_ return False, error_message except Exception as e: return False, str(e) - + + def create_welcome_blocks(dashboard_url: str): return [ { diff --git a/mcp_clients/slack/settings.py b/mcp-clients/src/mcp_clients/slack/settings.py similarity index 97% rename from mcp_clients/slack/settings.py rename to mcp-clients/src/mcp_clients/slack/settings.py index cbe40218..b20359c2 100644 --- a/mcp_clients/slack/settings.py +++ b/mcp-clients/src/mcp_clients/slack/settings.py @@ -1,7 +1,9 @@ import os -from pydantic_settings import BaseSettings from typing import ClassVar +from pydantic_settings import BaseSettings + + class SlackSettings(BaseSettings): """Settings for Slack integration""" @@ -39,5 +41,6 @@ class Config: case_sensitive = True extra = "ignore" + # Create a global settings instance -settings = SlackSettings() \ No newline at end of file +settings = SlackSettings() diff --git a/mcp_clients/slack_bot.py b/mcp-clients/src/mcp_clients/slack_bot.py similarity index 92% rename from mcp_clients/slack_bot.py rename to mcp-clients/src/mcp_clients/slack_bot.py index f9f0ce58..79ed83f0 100644 --- a/mcp_clients/slack_bot.py +++ b/mcp-clients/src/mcp_clients/slack_bot.py @@ -1,9 +1,9 @@ +import asyncio import logging import os -import time -import asyncio from typing import Any, Dict, List, Optional +import uvicorn from dotenv import load_dotenv from fastapi import APIRouter, FastAPI from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler @@ -11,19 +11,18 @@ from slack_bolt.authorization import AuthorizeResult from slack_sdk.errors import SlackApiError from slack_sdk.web.async_client import AsyncWebClient -import uvicorn -from base_bot import BaseBot -from llms import ChatMessage, MessageRole, TextContent, FileContent, Conversation -from mcp_client import MCPClient -from slack.context import SlackBotContext -from slack.event_routes import LoggingMiddleware, setup_http_routes -from slack.oauth_routes import setup_oauth_routes -from slack.settings import settings -from config import USE_PRODUCTION_DB +from mcp_clients.base_bot import BaseBot +from mcp_clients.config import USE_PRODUCTION_DB +from mcp_clients.llms.base import ChatMessage, Conversation, MessageRole, TextContent, FileContent +from mcp_clients.mcp_client import MCPClient +from mcp_clients.slack.context import SlackBotContext +from mcp_clients.slack.event_routes import LoggingMiddleware, setup_http_routes +from mcp_clients.slack.oauth_routes import setup_oauth_routes +from mcp_clients.slack.settings import settings if USE_PRODUCTION_DB: - from database.database import get_slack_auth_metadata + from mcp_clients.database.database import get_slack_auth_metadata else: # Define dummy function when database is not used async def get_slack_auth_metadata(team_id): @@ -49,17 +48,18 @@ async def get_slack_auth_metadata(team_id): "firecrawl_deep_research", ] - app = FastAPI(title="Slack MCP Bot") # Add logging middleware app.add_middleware(LoggingMiddleware) + # Add a health check endpoint -@app.get("/health") +@app.get("/api/health") async def health_check(): return {"status": "ok"} + class SlackBot(BaseBot): """ Slack-specific implementation of the bot. @@ -82,7 +82,7 @@ def __init__(self): # Create slack handler self.slack_handler: AsyncSlackRequestHandler = AsyncSlackRequestHandler(self.app) - + # Add user lock to prevent concurrent message processing self.user_locks = {} @@ -91,7 +91,7 @@ def __init__(self): # Register HTTP routes self._setup_http_routes() - + # Register event handlers self._setup_event_handlers() @@ -100,22 +100,23 @@ def _setup_oauth_routes(self) -> None: Set up OAuth routes for Slack app installation """ setup_oauth_routes(router=self.router) - + def _setup_http_routes(self) -> None: """Set up HTTP routes for the FastAPI integration""" setup_http_routes(router=self.router, slack_handler=self.slack_handler) - + def _setup_event_handlers(self) -> None: """Set up event handlers for the Slack app""" - from slack.message_handlers import register_message_handlers - from slack.interactive_handlers import register_interactive_handlers + from mcp_clients.slack.message_handlers import register_message_handlers + from mcp_clients.slack.interactive_handlers import register_interactive_handlers # Register message handlers (app mentions and DMs) register_message_handlers(self.app, self) # Register interactive handlers (buttons, etc.) register_interactive_handlers(self.app) @staticmethod - async def authorize(enterprise_id: Optional[str], team_id: Optional[str], logger: logging.Logger) -> Optional[AuthorizeResult]: + async def authorize(enterprise_id: Optional[str], team_id: Optional[str], logger: logging.Logger) -> Optional[ + AuthorizeResult]: """ The function to authorize an incoming request from Slack by checking if there is a team/user in the installation data. @@ -128,28 +129,28 @@ async def authorize(enterprise_id: Optional[str], team_id: Optional[str], logger AuthorizeResult: An authorization result object with the bot token information """ logger.info(f"Authorizing Slack request for team_id={team_id}, enterprise_id={enterprise_id}") - + if not team_id: logger.error("No team_id provided for authorization") return None - + try: # Get auth data from database using the team_id auth_data = await get_slack_auth_metadata(team_id) - + if not auth_data: logger.error(f"No authorization information found for team {team_id}") return None - + # Extract token and IDs from auth metadata bot_token = auth_data.get("access_token") bot_id = auth_data.get("app_id") bot_user_id = auth_data.get("bot_user_id") - + if not bot_token: logger.error(f"No bot token found for team {team_id}") return None - + logger.info(f"Successfully authorized request for team {team_id}") return AuthorizeResult( enterprise_id=enterprise_id, @@ -162,7 +163,6 @@ async def authorize(enterprise_id: Optional[str], team_id: Optional[str], logger logger.error(f"Error during Slack authorization: {str(e)}") return None - async def send_message(self, context: SlackBotContext, message: str) -> Optional[Dict[str, Any]]: """ Send a message on Slack. @@ -187,11 +187,11 @@ async def send_message(self, context: SlackBotContext, message: str) -> Optional "channel": context.channel_id, "text": message } - + # Reply in thread if thread_ts is available (for both DMs and public channels) if context.thread_ts: message_params["thread_ts"] = context.thread_ts - + response = await client.chat_postMessage(**message_params) return response except SlackApiError as e: @@ -211,7 +211,7 @@ async def _create_special_message_blocks(self, special_content: str) -> tuple: blocks = [] header_text = "" fallback_text = "" - + if special_content.startswith("[Calling tool"): # Extract tool name and arguments from the message content = special_content.strip("[]") @@ -223,14 +223,14 @@ async def _create_special_message_blocks(self, special_content: str) -> tuple: except: tool_name = "" tool_args = content - + header_text = "✨ MCP Server Call" fallback_text = "Tool call in progress" - + waiting_message = "ā³ _Working on your request..._" if any(tool in tool_name for tool in LONG_RUNNING_TOOLS): waiting_message = f"ā³ _This tool may take several minutes to complete..._" - + blocks = [ { "type": "header", @@ -269,7 +269,7 @@ async def _create_special_message_blocks(self, special_content: str) -> tuple: elif "still running" in special_content: header_text = "Tool Running" fallback_text = "Tool still running" - + blocks = [ { "type": "header", @@ -303,7 +303,7 @@ async def _create_special_message_blocks(self, special_content: str) -> tuple: # Default case header_text = "Special Message" fallback_text = "Special message received" - + blocks = [ { "type": "header", @@ -324,9 +324,9 @@ async def _create_special_message_blocks(self, special_content: str) -> tuple: "type": "divider" } ] - + return blocks, fallback_text - + async def _send_special_message(self, context: SlackBotContext, special_content: str) -> Optional[Dict[str, Any]]: """ Send a special message with formatted blocks. @@ -339,7 +339,7 @@ async def _send_special_message(self, context: SlackBotContext, special_content: Optional response from the API call """ blocks, fallback_text = await self._create_special_message_blocks(special_content) - + # Create a client for this specific operation if hasattr(context, 'bot_token') and context.bot_token: client = AsyncWebClient(token=context.bot_token) @@ -354,10 +354,10 @@ async def _send_special_message(self, context: SlackBotContext, special_content: return None async def process_query_with_streaming( - self, - mcp_client: MCPClient, - messages_history: List[ChatMessage], - context: SlackBotContext + self, + mcp_client: MCPClient, + messages_history: List[ChatMessage], + context: SlackBotContext ) -> Optional[Dict[str, Any]]: """ Process a query with streaming in Slack-specific way. @@ -377,7 +377,7 @@ async def process_query_with_streaming( buffer = "" async with asyncio.timeout(settings.STREAMING_TIMEOUT): async for chunk in mcp_client.process_query_stream( - messages_history, self.store_new_messages if USE_PRODUCTION_DB else None + messages_history, self.store_new_messages if USE_PRODUCTION_DB else None ): buffer += chunk # Check if the chunk contains a message split token @@ -414,7 +414,8 @@ async def process_query_with_streaming( logger.error(f"Error processing query: {e}", exc_info=True) return await self.send_message(context, f"Error processing query: {str(e)}") - async def get_messages_history(self, conversation: Conversation, slack_context: SlackBotContext, limit: int = 6) -> List[ChatMessage]: + async def get_messages_history(self, conversation: Conversation, slack_context: SlackBotContext, limit: int = 6) -> \ + List[ChatMessage]: """ Get the previous messages for the conversation. @@ -439,7 +440,7 @@ async def get_messages_history(self, conversation: Conversation, slack_context: "channel": slack_context.channel_id, "limit": limit + 1 } - + # If thread_ts is provided, get messages from that thread if hasattr(slack_context, 'thread_ts') and slack_context.thread_ts: params["ts"] = slack_context.thread_ts @@ -452,7 +453,7 @@ async def get_messages_history(self, conversation: Conversation, slack_context: logger.info(f"--- History response: {response}") if response["ok"]: messages = response["messages"] - + # Process each message for message in messages: content = [] @@ -492,19 +493,19 @@ async def get_messages_history(self, conversation: Conversation, slack_context: role=role, content=content, ) - + chat_messages.append(chat_message) - + else: logger.error(f"Error fetching Slack conversation history: {response.get('error', 'Unknown error')}") - + except Exception as e: logger.error(f"Error retrieving previous messages: {e}", exc_info=True) # Return empty list in case of error return [] - + return chat_messages - + def get_router(self) -> APIRouter: """ Get the FastAPI router for Slack endpoints. @@ -513,7 +514,7 @@ def get_router(self) -> APIRouter: FastAPI router with Slack endpoints """ return self.router - + def run(self, port: Optional[int] = None) -> None: """ Run the bot (placeholder - overridden by FastAPI integration) @@ -523,11 +524,15 @@ def run(self, port: Optional[int] = None) -> None: """ pass -# Initialize the bot and register its router with the app -bot = SlackBot() -app.include_router(bot.get_router()) + +def main(): + # Initialize the bot and register its router with the app + bot = SlackBot() + app.include_router(bot.get_router()) + port = int(os.environ.get("PORT", 8080)) + uvicorn.run(app, host="0.0.0.0", port=port) + # For local development if __name__ == "__main__": - port = int(os.environ.get("PORT", 8080)) - uvicorn.run(app, host="0.0.0.0", port=port) \ No newline at end of file + main() diff --git a/mcp_clients/sse_client.py b/mcp-clients/src/mcp_clients/sse_client.py similarity index 78% rename from mcp_clients/sse_client.py rename to mcp-clients/src/mcp_clients/sse_client.py index c04aa791..3e0e00f9 100644 --- a/mcp_clients/sse_client.py +++ b/mcp-clients/src/mcp_clients/sse_client.py @@ -10,21 +10,19 @@ import argparse import logging -import sys -import asyncio +from contextlib import AsyncExitStack from functools import partial -from urllib.parse import urlparse from typing import Optional -from contextlib import AsyncExitStack +from urllib.parse import urlparse import anyio - +import sys +from dotenv import load_dotenv from mcp.client.session import ClientSession from mcp.client.sse import sse_client from mcp.client.stdio import StdioServerParameters, stdio_client from anthropic import Anthropic -from dotenv import load_dotenv load_dotenv() # load environment variables from .env @@ -43,28 +41,28 @@ def __init__(self): self.session: Optional[ClientSession] = None self.exit_stack = AsyncExitStack() self.anthropic = Anthropic() + self.messages = [] # Store conversation history async def process_query(self, query: str) -> str: """Process a query using Claude and available tools""" - messages = [ - { - "role": "user", - "content": query - } - ] + # Add user message to conversation history + self.messages.append({ + "role": "user", + "content": query + }) response = await self.session.list_tools() - available_tools = [{ + available_tools = [{ "name": tool.name, "description": tool.description, "input_schema": tool.inputSchema } for tool in response.tools] - # Initial Claude API call + # Initial Claude API call with full conversation history response = self.anthropic.messages.create( model="claude-3-5-sonnet-20241022", max_tokens=1000, - messages=messages, + messages=self.messages, tools=available_tools ) @@ -77,12 +75,14 @@ async def process_query(self, query: str) -> str: elif content.type == 'tool_use': tool_name = content.name tool_args = content.input - + # Execute tool call result = await self.session.call_tool(tool_name, tool_args) final_text.append(f"[Calling tool {tool_name} with args {tool_args}]") + logger.info(f"calling tool {tool_name} with args {tool_args}") - messages.append({ + # Add assistant message with tool use to history + self.messages.append({ "role": "assistant", "content": [{ "type": "tool_use", @@ -91,8 +91,10 @@ async def process_query(self, query: str) -> str: "id": content.id }] }) - messages.append({ - "role": "user", + + # Add tool result to history + self.messages.append({ + "role": "user", "content": [{ "type": "tool_result", "tool_use_id": content.id, @@ -101,35 +103,48 @@ async def process_query(self, query: str) -> str: }) final_text.append(f"[Tool call result: {result.content[0].text}]") - # Get next response from Claude + logger.info(f"Tool call result: {result.content[0].text}") + + # Get next response from Claude with full history response = self.anthropic.messages.create( model="claude-3-5-sonnet-20241022", max_tokens=1000, - messages=messages, + messages=self.messages, ) final_text.append(response.content[0].text) + # Add final assistant response to history + if response.content and response.content[0].type == 'text': + self.messages.append({ + "role": "assistant", + "content": response.content[0].text + }) + return "\n".join(final_text) async def chat_loop(self): """Run an interactive chat loop""" print("\nMCP Client Started!") - print("Type your queries or 'quit' to exit.") - + print("Type your queries, 'clear' to clear history, or 'quit' to exit.") + while True: try: query = input("\nQuery: ").strip() - + if query.lower() == 'quit': break - + elif query.lower() == 'clear': + self.messages = [] + print("Message history cleared.") + continue + response = await self.process_query(query) print("\n" + response) - + except Exception as e: print(f"\nError: {str(e)}") - + async def cleanup(self): """Clean up resources""" await self.exit_stack.aclose() @@ -145,7 +160,7 @@ async def run_session(read_stream, write_stream): logger.info("Initializing session") await session.initialize() logger.info("Initialized") - + # Start the chat loop await client.chat_loop() @@ -177,4 +192,4 @@ def cli(): if __name__ == "__main__": - cli() \ No newline at end of file + cli() diff --git a/mcp-clients/src/mcp_clients/streamable_http_client.py b/mcp-clients/src/mcp_clients/streamable_http_client.py new file mode 100644 index 00000000..9be834e4 --- /dev/null +++ b/mcp-clients/src/mcp_clients/streamable_http_client.py @@ -0,0 +1,184 @@ +""" +This is a simple MCP client that uses streamable HTTP to connect to an MCP server. +It is useful for simple testing of MCP servers. + +To run the client, use the following command: +``` +python streamable_http_client.py +``` +""" + +import argparse +import json +import logging +from contextlib import AsyncExitStack +from functools import partial +from typing import Optional + +import anyio +import sys +from dotenv import load_dotenv +from mcp.client.session import ClientSession +from mcp.client.streamable_http import streamablehttp_client + +from openai import OpenAI + +load_dotenv() # load environment variables from .env + +if not sys.warnoptions: + import warnings + + warnings.simplefilter("ignore") + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("client") + + +class MCPClient: + def __init__(self): + # Initialize session and client objects + self.session: Optional[ClientSession] = None + self.exit_stack = AsyncExitStack() + self.openai = OpenAI() + self.messages = [] # Store conversation history + + async def process_query(self, query: str) -> str: + """Process a query using OpenAI and available tools""" + # Add user message to conversation history + self.messages.append({ + "role": "user", + "content": query + }) + + response = await self.session.list_tools() + available_tools = [{ + "type": "function", + "function": { + "name": tool.name, + "description": tool.description, + "parameters": tool.inputSchema + } + } for tool in response.tools] + + # Initial OpenAI API call with full conversation history + response = self.openai.chat.completions.create( + model="gpt-4o", + max_tokens=1000, + messages=self.messages, + tools=available_tools + ) + + # Process response and handle tool calls + final_text = [] + + message = response.choices[0].message + if message.content: + final_text.append(message.content) + + if message.tool_calls: + for tool_call in message.tool_calls: + tool_name = tool_call.function.name + tool_args = json.loads(tool_call.function.arguments) # Parse JSON string to dict + + # Execute tool call + result = await self.session.call_tool(tool_name, tool_args) + final_text.append(f"[Calling tool {tool_name} with args {tool_args}]") + + # Add assistant message with tool use to history + self.messages.append({ + "role": "assistant", + "content": message.content, + "tool_calls": [{ + "id": tool_call.id, + "type": "function", + "function": { + "name": tool_name, + "arguments": tool_call.function.arguments + } + }] + }) + + # Add tool result to history + self.messages.append({ + "role": "tool", + "tool_call_id": tool_call.id, + "content": str(result.content[0].text) + }) + final_text.append(f"[Tool call result: {result.content[0].text}]") + + # Get next response from OpenAI with full history + response = self.openai.chat.completions.create( + model="gpt-4o", + max_tokens=1000, + messages=self.messages, + ) + + final_text.append(response.choices[0].message.content) + + # Add final assistant response to history + if response.choices[0].message.content: + self.messages.append({ + "role": "assistant", + "content": response.choices[0].message.content + }) + + return "\n".join(final_text) + + async def chat_loop(self): + """Run an interactive chat loop""" + print("\nMCP Client Started!") + print("Type your queries, 'clear' to clear history, or 'quit' to exit.") + + while True: + try: + query = input("\nQuery: ").strip() + + if query.lower() == 'quit': + break + elif query.lower() == 'clear': + self.messages = [] + print("Message history cleared.") + continue + + response = await self.process_query(query) + print("\n" + response) + + except Exception as e: + print(f"\nError: {str(e)}") + + async def cleanup(self): + """Clean up resources""" + await self.exit_stack.aclose() + + +async def run_session(read_stream, write_stream): + client = MCPClient() + async with ClientSession(read_stream, write_stream) as session: + client.session = session + + logger.info("Initializing session") + await session.initialize() + logger.info("Initialized") + + # Start the chat loop + await client.chat_loop() + + +# example streamable http url: http://localhost:8000/mcp as per the MCP spec +async def main(url: str, args: list[str]): + # Use streamable HTTP client + async with streamablehttp_client(url) as streams: + await run_session(*streams[:2]) # Only pass read_stream and write_stream + + +def cli(): + parser = argparse.ArgumentParser() + parser.add_argument("url", help="URL to connect to") + parser.add_argument("args", nargs="*", help="Additional arguments") + + args = parser.parse_args() + anyio.run(partial(main, args.url, args.args), backend="trio") + + +if __name__ == "__main__": + cli() \ No newline at end of file diff --git a/mcp_clients/web_bot.py b/mcp-clients/src/mcp_clients/web_bot.py similarity index 91% rename from mcp_clients/web_bot.py rename to mcp-clients/src/mcp_clients/web_bot.py index 3fdc0100..db2f3736 100644 --- a/mcp_clients/web_bot.py +++ b/mcp-clients/src/mcp_clients/web_bot.py @@ -1,18 +1,17 @@ -import os -import logging import asyncio -from typing import Dict, Any, List, Optional import json +import logging +from typing import Any, List, Optional -from fastapi import FastAPI, Request, Depends, HTTPException, status, BackgroundTasks +from fastapi import FastAPI, HTTPException, status, BackgroundTasks from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import StreamingResponse from pydantic import BaseModel -from mcp_client import MCPClient -from base_bot import BaseBot, BotContext -from llms import ChatMessage, MessageRole, TextContent, Conversation -from config import USE_PRODUCTION_DB +from mcp_clients.base_bot import BotContext, BaseBot +from mcp_clients.config import USE_PRODUCTION_DB +from mcp_clients.llms.base import ChatMessage, Conversation, MessageRole, TextContent +from mcp_clients.mcp_client import MCPClient # Configure logging logging.basicConfig( @@ -49,11 +48,11 @@ class WebBotContext(BotContext): """ def __init__( - self, - platform_name: str, - user_id: str, - conversation_id: Optional[str] = None, - user_message: Any = None, + self, + platform_name: str, + user_id: str, + conversation_id: Optional[str] = None, + user_message: Any = None, ): """ Initialize the Web bot context. @@ -106,9 +105,9 @@ def __init__(self): self.user_locks = {} async def send_message( - self, - context: WebBotContext, - message: str, + self, + context: WebBotContext, + message: str, ) -> Any: """ Send a message via web API. @@ -127,10 +126,10 @@ async def send_message( return {"message": message} async def process_query_with_streaming( - self, - mcp_client: MCPClient, - messages_history: List[ChatMessage], - context: WebBotContext, + self, + mcp_client: MCPClient, + messages_history: List[ChatMessage], + context: WebBotContext, ) -> StreamingResponse: """ Process a query with streaming in web-specific way. @@ -149,8 +148,8 @@ async def stream_generator(): try: async with asyncio.timeout(200): async for chunk in mcp_client.process_query_stream( - messages_history, - self.store_new_messages if USE_PRODUCTION_DB else None, + messages_history, + self.store_new_messages if USE_PRODUCTION_DB else None, ): # Check if this is a special message if "" in chunk: @@ -176,7 +175,7 @@ async def stream_generator(): ) async def get_messages_history( - self, conversation: Conversation, context: WebBotContext, limit: int = 6 + self, conversation: Conversation, context: WebBotContext, limit: int = 6 ) -> List[ChatMessage]: """ Get the previous messages for the conversation. @@ -191,7 +190,7 @@ async def get_messages_history( """ if USE_PRODUCTION_DB: - from database.database import get_messages_for_conversation + from mcp_clients.database.database import get_messages_for_conversation # Get the messages from the conversation # Limit the number of messages, considering the tool calls, we multiply by 3 diff --git a/mcp-clients/src/mcp_clients/whatsapp_bot.py b/mcp-clients/src/mcp_clients/whatsapp_bot.py new file mode 100644 index 00000000..e848a835 --- /dev/null +++ b/mcp-clients/src/mcp_clients/whatsapp_bot.py @@ -0,0 +1,522 @@ +import os +import logging +import uvicorn +import time +from typing import Dict, Any, Optional, List +from fastapi import FastAPI, Request, Response +from pywa_async import WhatsApp, types +from pywa_async.types import Message +from dotenv import load_dotenv + +from mcp_clients.base_bot import BaseBot, BotContext +from mcp_clients.config import USE_PRODUCTION_DB +from mcp_clients.llms.base import ChatMessage, Conversation, MessageRole, TextContent, FileContent +from mcp_clients.mcp_client import MCPClient + +# Load environment variables +load_dotenv() + +# Configure logger +logger = logging.getLogger(__name__) +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') + +# Environment variables +ACCESS_TOKEN = os.environ.get("WHATSAPP_ACCESS_TOKEN") +APP_ID = os.environ.get("WHATSAPP_APP_ID") +APP_SECRET = os.environ.get("WHATSAPP_APP_SECRET") +PHONE_NUMBER_ID = os.environ.get("WHATSAPP_PHONE_NUMBER_ID") +VERIFY_TOKEN = os.environ.get("WHATSAPP_VERIFY_TOKEN") +CALLBACK_URL = os.environ.get("CALLBACK_URL") + +# Define long-running tools +LONG_RUNNING_TOOLS = [ + "generate_web_reports", + "firecrawl_deep_research", +] + +# Create FastAPI app +app = FastAPI(title="WhatsApp Bot API") + +# Initialize WhatsApp client with ASYNC version +wa = WhatsApp( + phone_id=PHONE_NUMBER_ID, + token=ACCESS_TOKEN, + server=app, + callback_url=CALLBACK_URL, + verify_token=VERIFY_TOKEN, + app_id=APP_ID, + app_secret=APP_SECRET, + webhook_challenge_delay=20.0 # Increased delay for webhook challenge +) + +class WhatsAppBotContext(BotContext): + """ + WhatsApp-specific context for the bot operations. + Extends the base BotContext with WhatsApp-specific attributes. + """ + + def __init__( + self, + platform_name: str, + user_id: str, + message: Message = None, + conversation_id: Optional[str] = None, + ): + """ + Initialize the WhatsApp bot context. + + Args: + platform_name: Name of the platform ('whatsapp') + user_id: WhatsApp user ID (wa_id) + message: WhatsApp message object + conversation_id: Optional conversation ID for tracking sessions + """ + super().__init__( + platform_name=platform_name, + user_id=user_id, + user_message=message.text if message else None + ) + self.message = message + self.conversation_id = conversation_id + + def get_channel_id(self) -> Optional[str]: + """ + Get the channel ID for the current context. + For WhatsApp, we use the user_id as channel_id. + + Returns: + String representation of the channel ID + """ + return self.user_id + + def get_thread_id(self) -> Optional[str]: + """ + Get the thread ID for the current context. + WhatsApp doesn't have threads, so we return None. + + Returns: + None as WhatsApp doesn't support threading + """ + return None + + +class WhatsAppBot(BaseBot): + """ + WhatsApp-specific implementation of the bot. + Extends the base bot with WhatsApp-specific functionality. + """ + + def __init__(self): + """ + Initialize the WhatsApp bot. + """ + super().__init__(platform_name="whatsapp") + + # Add user lock to prevent concurrent message processing + self.user_locks = {} + + async def send_message( + self, + context: WhatsAppBotContext, + message: str + ) -> Any: + """ + Send a message via WhatsApp. + + Args: + context: WhatsApp bot context + message: Message text to send + + Returns: + Message object or None on failure + """ + try: + # Send message via WhatsApp client + if len(message) > 4096: + # WhatsApp has a 4096 character limit, so split long messages + parts = [message[i:i+4000] for i in range(0, len(message), 4000)] + responses = [] + for part in parts: + response = await wa.send_message( + to=context.user_id, + text=part + ) + responses.append(response) + return responses[-1] # Return the last response + else: + return await wa.send_message( + to=context.user_id, + text=message + ) + except Exception as e: + logger.error(f"Error sending message: {e}") + return None + + async def format_special_message(self, special_content: str) -> str: + """ + Format special messages for WhatsApp based on their content. + + Args: + special_content: The content of the special message + + Returns: + Formatted message string + """ + # Parse the special message + if special_content.startswith("[Calling tool"): + # Extract tool name and arguments + try: + content = special_content.strip("[]") + tool_parts = content.split("with arguments") + tool_name = tool_parts[0].replace("Calling tool", "").strip() + tool_args = tool_parts[1].strip() if len(tool_parts) > 1 else "" + + # Create waiting message based on tool type + waiting_message = "ā³ _Working on your request..._" + if any(tool in tool_name for tool in LONG_RUNNING_TOOLS): + waiting_message = f"ā³ _This tool may take several minutes to complete..._" + + # Format for WhatsApp with minimal formatting options + message = f"""✨ *MCP Server Call* + +*Tool:* {tool_name} +*Arguments:* +```{tool_args[:200]}{'...' if len(tool_args) > 200 else ''}``` + +{waiting_message} +----------------------------""" + + return message + except Exception as e: + logger.error(f"Error formatting tool call message: {e}") + return f"Processing request with tool: {special_content}" + + elif "still running" in special_content: + return f"""ā³ *Tool Still Running* + +{special_content.strip('[]')} + +_Tool is still running. Please wait..._ +----------------------------""" + + else: + return f"""ā„¹ļø *System Message* + +{special_content.strip('[]')} +----------------------------""" + + async def send_special_message(self, context: WhatsAppBotContext, special_content: str) -> Any: + """ + Handle special messages like tool calls and send formatted responses. + + Args: + context: WhatsApp bot context + special_content: The content of the special message + + Returns: + Response from WhatsApp API + """ + try: + formatted_message = await self.format_special_message(special_content) + response = await self.send_message(context, formatted_message) + logger.info(f"Sent special message to {context.user_id}: {special_content[:50]}...") + return response + except Exception as e: + logger.error(f"Error sending special message: {e}") + return None + + async def process_query_with_streaming( + self, + mcp_client: MCPClient, + messages_history: List[ChatMessage], + context: WhatsAppBotContext + ) -> Any: + """ + Process a query using simple chat completion instead of streaming for WhatsApp. + WhatsApp doesn't support streamed messages, so we'll wait for the full response. + + Args: + mcp_client: MCP client instance + messages_history: List of previous messages + context: WhatsApp bot context + + Returns: + Final message object or None + """ + try: + # Send a waiting message + await self.send_message(context, "ā³ _Processing your request..._") + + # Use non-streaming response instead of streaming + response = await mcp_client.process_query( + messages_history, + self.store_new_messages if USE_PRODUCTION_DB else None + ) + + # Send the complete response + if response: + final_response = await self.send_message(context, response) + return final_response + else: + return await self.send_message(context, "I couldn't generate a response. Please try again.") + + except Exception as e: + logger.error(f"Error processing query: {e}", exc_info=True) + return await self.send_message(context, f"Error processing query: {str(e)}") + + async def process_query_stream( + self, + mcp_client: MCPClient, + messages_history: List[ChatMessage], + context: WhatsAppBotContext + ) -> Any: + """ + Process a query using streaming responses, but batching results for WhatsApp. + This method handles streaming by collecting chunks and sending them in batches. + + Args: + mcp_client: MCP client instance + messages_history: List of previous messages + context: WhatsApp bot context + + Returns: + Final response message object + """ + try: + # Send initial waiting message + await self.send_message(context, "ā³ _Processing your request..._") + + # Initialize variables to collect streaming chunks + collected_chunks = "" + current_chunk = "" + special_messages = [] + buffer_size = 500 # Threshold for buffering text + last_send_time = time.time() + update_interval = 5.0 # Seconds between updates + + # Use MCP client's streaming generator + async for chunk in mcp_client.process_query_stream( + messages_history, + self.store_new_messages if USE_PRODUCTION_DB else None + ): + # Check if this is a special message (like tool call) + if chunk.startswith("") and chunk.endswith("\n"): + # Extract the special message content + special_content = chunk.replace("", "").split(mcp_client.get_message_split_token())[0].strip() + + # Send the current text chunk if we have any + if current_chunk: + collected_chunks += current_chunk + current_chunk = "" + if len(collected_chunks) >= buffer_size: + await self.send_message(context, collected_chunks) + collected_chunks = "" + last_send_time = time.time() + + # Send special message (like tool calls) + await self.send_special_message(context, special_content) + special_messages.append(special_content) + continue + + # Regular text chunk + current_chunk += chunk + + # Check if it's time to send a buffer update + current_time = time.time() + should_send_update = ( + (current_time - last_send_time >= update_interval and len(current_chunk) > 0) or + len(current_chunk) >= buffer_size + ) + + if should_send_update: + # Add chunk to collected text + collected_chunks += current_chunk + current_chunk = "" + + # Send intermediate update + await self.send_message(context, collected_chunks) + collected_chunks = "" + last_send_time = current_time + + # Send any remaining text + final_text = collected_chunks + current_chunk + if final_text: + final_response = await self.send_message(context, final_text) + return final_response + else: + # If no text was generated but we had special messages, send a summary + if special_messages: + summary = "āœ… *Task Completed*\n\n" + if any("Calling tool" in msg for msg in special_messages): + summary += "I've completed the requested operations using various tools." + return await self.send_message(context, summary) + else: + return await self.send_message(context, "I couldn't generate a response. Please try again.") + + except Exception as e: + logger.error(f"Error processing streaming query: {e}", exc_info=True) + error_message = f"Error processing your request: {str(e)}" + return await self.send_message(context, error_message) + + async def get_messages_history( + self, conversation: Conversation, context: WhatsAppBotContext, limit: int = 6 + ) -> List[ChatMessage]: + """ + Get the previous messages for the conversation. + For WhatsApp, in local development mode, we just use the current message. + + Args: + conversation: Conversation object + context: WhatsApp bot context + limit: Maximum number of previous messages to retrieve + + Returns: + List of ChatMessage objects + """ + if USE_PRODUCTION_DB: + # In production, use the conversation history from the database + # This would be implemented as needed, similar to SlackBot's implementation + logger.info("Database operations for message history not implemented yet") + + # For local development, just use the current message + logger.info("Using current message as history") + return [ + ChatMessage( + role=MessageRole.USER, + content=[TextContent(text=f"User says: {context.user_message}")], + ) + ] + + def run(self, port: Optional[int] = None) -> None: + """ + Run the bot (placeholder - actual running is handled by FastAPI integration) + + Args: + port: Optional port number (ignored for WhatsAppBot as it's handled by FastAPI) + """ + logger.info(f"WhatsApp bot running within FastAPI server on port {port if port else 'default'}") + pass + +# Handle incoming WhatsApp messages +@wa.on_message() +async def handle_message(client: WhatsApp, message: Message): + """Handle incoming WhatsApp messages""" + logger.info(f"Received message: {message.text} from {message.from_user.wa_id}") + + try: + # Use the message_id to mark as read and display typing indicator + await client.indicate_typing(message_id=message.id) + + # Create WhatsApp bot instance + whatsapp_bot = WhatsAppBot() + + # Create context for this message + context = WhatsAppBotContext( + platform_name="whatsapp", + user_id=message.from_user.wa_id, + message=message + ) + + # Get server URLs to connect to + server_urls = await whatsapp_bot.get_server_urls(context) + if not server_urls: + await whatsapp_bot.send_message( + context, + "Sorry, I couldn't find any MCP servers to connect to. Please check your configuration." + ) + return + + # Initialize MCP client + mcp_client = await whatsapp_bot.initialize_mcp_client(context, server_urls) + if not mcp_client: + await whatsapp_bot.send_message( + context, + "Sorry, I couldn't initialize the MCP client. Please try again later." + ) + return + + # Get conversation to get message history + conversation = mcp_client.conversation + + # Get message history + messages_history = await whatsapp_bot.get_messages_history(conversation, context) + + # Process the query with streaming + await whatsapp_bot.process_query_stream(mcp_client, messages_history, context) + + # Cleanup MCP client resources + await mcp_client.cleanup() + + logger.info(f"Processed message from {message.from_user.wa_id}") + except Exception as e: + logger.error(f"Error handling message: {e}", exc_info=True) + # Try to send an error message + try: + await client.send_message( + to=message.from_user.wa_id, + text=f"Sorry, an error occurred: {str(e)}" + ) + except: + pass + +@app.get("/") +async def root(): + """Root endpoint for health check""" + return {"status": "ok", "service": "WhatsApp Bot"} + +# Add a webhook verification endpoint +@app.get("/webhook") +async def verify_webhook(request: Request): + """ + Manually handle webhook verification + This helps to avoid issues with the automatic verification process + """ + query_params = dict(request.query_params) + + # Extract verification parameters + mode = query_params.get("hub.mode") + token = query_params.get("hub.verify_token") + challenge = query_params.get("hub.challenge") + + # Verify parameters + if mode == "subscribe" and token == VERIFY_TOKEN: + logger.info("Webhook verified successfully!") + return Response(content=challenge, media_type="text/plain") + else: + logger.error(f"Webhook verification failed. Mode: {mode}, Token: {token}") + return Response(status_code=403) + +# Add a webhook endpoint for receiving messages +@app.post("/webhook") +async def webhook(request: Request): + """ + Handle incoming webhook events + """ + body = await request.json() + logger.debug(f"Received webhook: {body}") + + # Let the WhatsApp client process the webhook + return await wa.process_webhook(body) + +def main(): + """ + Main entry point for running the WhatsApp bot as a standalone service. + """ + # Enable uvloop for better performance if available + try: + import uvloop + uvloop.install() + logger.info("Using uvloop for improved performance") + except ImportError: + logger.info("uvloop not available, using default event loop") + + # Run the FastAPI app + port = int(os.environ.get("PORT", 8080)) + logger.info(f"Starting WhatsApp bot server on port {port}") + logger.info(f"Using callback URL: {CALLBACK_URL}") + + # Using uvicorn directly + uvicorn.run(app, host="0.0.0.0", port=port) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp-clients/uv.lock b/mcp-clients/uv.lock new file mode 100644 index 00000000..4d56f20e --- /dev/null +++ b/mcp-clients/uv.lock @@ -0,0 +1,1908 @@ +version = 1 +revision = 1 +requires-python = ">=3.12" +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version < '3.13'", +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265 }, +] + +[[package]] +name = "aiohttp" +version = "3.11.16" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/d9/1c4721d143e14af753f2bf5e3b681883e1f24b592c0482df6fa6e33597fa/aiohttp-3.11.16.tar.gz", hash = "sha256:16f8a2c9538c14a557b4d309ed4d0a7c60f0253e8ed7b6c9a2859a7582f8b1b8", size = 7676826 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/38/100d01cbc60553743baf0fba658cb125f8ad674a8a771f765cdc155a890d/aiohttp-3.11.16-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:911a6e91d08bb2c72938bc17f0a2d97864c531536b7832abee6429d5296e5b27", size = 704881 }, + { url = "https://files.pythonhosted.org/packages/21/ed/b4102bb6245e36591209e29f03fe87e7956e54cb604ee12e20f7eb47f994/aiohttp-3.11.16-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6ac13b71761e49d5f9e4d05d33683bbafef753e876e8e5a7ef26e937dd766713", size = 464564 }, + { url = "https://files.pythonhosted.org/packages/3b/e1/a9ab6c47b62ecee080eeb33acd5352b40ecad08fb2d0779bcc6739271745/aiohttp-3.11.16-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fd36c119c5d6551bce374fcb5c19269638f8d09862445f85a5a48596fd59f4bb", size = 456548 }, + { url = "https://files.pythonhosted.org/packages/80/ad/216c6f71bdff2becce6c8776f0aa32cb0fa5d83008d13b49c3208d2e4016/aiohttp-3.11.16-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d489d9778522fbd0f8d6a5c6e48e3514f11be81cb0a5954bdda06f7e1594b321", size = 1691749 }, + { url = "https://files.pythonhosted.org/packages/bd/ea/7df7bcd3f4e734301605f686ffc87993f2d51b7acb6bcc9b980af223f297/aiohttp-3.11.16-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69a2cbd61788d26f8f1e626e188044834f37f6ae3f937bd9f08b65fc9d7e514e", size = 1736874 }, + { url = "https://files.pythonhosted.org/packages/51/41/c7724b9c87a29b7cfd1202ec6446bae8524a751473d25e2ff438bc9a02bf/aiohttp-3.11.16-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd464ba806e27ee24a91362ba3621bfc39dbbb8b79f2e1340201615197370f7c", size = 1786885 }, + { url = "https://files.pythonhosted.org/packages/86/b3/f61f8492fa6569fa87927ad35a40c159408862f7e8e70deaaead349e2fba/aiohttp-3.11.16-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce63ae04719513dd2651202352a2beb9f67f55cb8490c40f056cea3c5c355ce", size = 1698059 }, + { url = "https://files.pythonhosted.org/packages/ce/be/7097cf860a9ce8bbb0e8960704e12869e111abcd3fbd245153373079ccec/aiohttp-3.11.16-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09b00dd520d88eac9d1768439a59ab3d145065c91a8fab97f900d1b5f802895e", size = 1626527 }, + { url = "https://files.pythonhosted.org/packages/1d/1d/aaa841c340e8c143a8d53a1f644c2a2961c58cfa26e7b398d6bf75cf5d23/aiohttp-3.11.16-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7f6428fee52d2bcf96a8aa7b62095b190ee341ab0e6b1bcf50c615d7966fd45b", size = 1644036 }, + { url = "https://files.pythonhosted.org/packages/2c/88/59d870f76e9345e2b149f158074e78db457985c2b4da713038d9da3020a8/aiohttp-3.11.16-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:13ceac2c5cdcc3f64b9015710221ddf81c900c5febc505dbd8f810e770011540", size = 1685270 }, + { url = "https://files.pythonhosted.org/packages/2b/b1/c6686948d4c79c3745595efc469a9f8a43cab3c7efc0b5991be65d9e8cb8/aiohttp-3.11.16-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fadbb8f1d4140825069db3fedbbb843290fd5f5bc0a5dbd7eaf81d91bf1b003b", size = 1650852 }, + { url = "https://files.pythonhosted.org/packages/fe/94/3e42a6916fd3441721941e0f1b8438e1ce2a4c49af0e28e0d3c950c9b3c9/aiohttp-3.11.16-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6a792ce34b999fbe04a7a71a90c74f10c57ae4c51f65461a411faa70e154154e", size = 1704481 }, + { url = "https://files.pythonhosted.org/packages/b1/6d/6ab5854ff59b27075c7a8c610597d2b6c38945f9a1284ee8758bc3720ff6/aiohttp-3.11.16-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f4065145bf69de124accdd17ea5f4dc770da0a6a6e440c53f6e0a8c27b3e635c", size = 1735370 }, + { url = "https://files.pythonhosted.org/packages/73/2a/08a68eec3c99a6659067d271d7553e4d490a0828d588e1daa3970dc2b771/aiohttp-3.11.16-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa73e8c2656a3653ae6c307b3f4e878a21f87859a9afab228280ddccd7369d71", size = 1697619 }, + { url = "https://files.pythonhosted.org/packages/61/d5/fea8dbbfb0cd68fbb56f0ae913270a79422d9a41da442a624febf72d2aaf/aiohttp-3.11.16-cp312-cp312-win32.whl", hash = "sha256:f244b8e541f414664889e2c87cac11a07b918cb4b540c36f7ada7bfa76571ea2", size = 411710 }, + { url = "https://files.pythonhosted.org/packages/33/fb/41cde15fbe51365024550bf77b95a4fc84ef41365705c946da0421f0e1e0/aiohttp-3.11.16-cp312-cp312-win_amd64.whl", hash = "sha256:23a15727fbfccab973343b6d1b7181bfb0b4aa7ae280f36fd2f90f5476805682", size = 438012 }, + { url = "https://files.pythonhosted.org/packages/52/52/7c712b2d9fb4d5e5fd6d12f9ab76e52baddfee71e3c8203ca7a7559d7f51/aiohttp-3.11.16-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a3814760a1a700f3cfd2f977249f1032301d0a12c92aba74605cfa6ce9f78489", size = 698005 }, + { url = "https://files.pythonhosted.org/packages/51/3e/61057814f7247666d43ac538abcd6335b022869ade2602dab9bf33f607d2/aiohttp-3.11.16-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9b751a6306f330801665ae69270a8a3993654a85569b3469662efaad6cf5cc50", size = 461106 }, + { url = "https://files.pythonhosted.org/packages/4f/85/6b79fb0ea6e913d596d5b949edc2402b20803f51b1a59e1bbc5bb7ba7569/aiohttp-3.11.16-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ad497f38a0d6c329cb621774788583ee12321863cd4bd9feee1effd60f2ad133", size = 453394 }, + { url = "https://files.pythonhosted.org/packages/4b/04/e1bb3fcfbd2c26753932c759593a32299aff8625eaa0bf8ff7d9c0c34a36/aiohttp-3.11.16-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca37057625693d097543bd88076ceebeb248291df9d6ca8481349efc0b05dcd0", size = 1666643 }, + { url = "https://files.pythonhosted.org/packages/0e/27/97bc0fdd1f439b8f060beb3ba8fb47b908dc170280090801158381ad7942/aiohttp-3.11.16-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5abcbba9f4b463a45c8ca8b7720891200658f6f46894f79517e6cd11f3405ca", size = 1721948 }, + { url = "https://files.pythonhosted.org/packages/2c/4f/bc4c5119e75c05ef15c5670ef1563bbe25d4ed4893b76c57b0184d815e8b/aiohttp-3.11.16-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f420bfe862fb357a6d76f2065447ef6f484bc489292ac91e29bc65d2d7a2c84d", size = 1774454 }, + { url = "https://files.pythonhosted.org/packages/73/5b/54b42b2150bb26fdf795464aa55ceb1a49c85f84e98e6896d211eabc6670/aiohttp-3.11.16-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58ede86453a6cf2d6ce40ef0ca15481677a66950e73b0a788917916f7e35a0bb", size = 1677785 }, + { url = "https://files.pythonhosted.org/packages/10/ee/a0fe68916d3f82eae199b8535624cf07a9c0a0958c7a76e56dd21140487a/aiohttp-3.11.16-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fdec0213244c39973674ca2a7f5435bf74369e7d4e104d6c7473c81c9bcc8c4", size = 1608456 }, + { url = "https://files.pythonhosted.org/packages/8b/48/83afd779242b7cf7e1ceed2ff624a86d3221e17798061cf9a79e0b246077/aiohttp-3.11.16-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:72b1b03fb4655c1960403c131740755ec19c5898c82abd3961c364c2afd59fe7", size = 1622424 }, + { url = "https://files.pythonhosted.org/packages/6f/27/452f1d5fca1f516f9f731539b7f5faa9e9d3bf8a3a6c3cd7c4b031f20cbd/aiohttp-3.11.16-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:780df0d837276276226a1ff803f8d0fa5f8996c479aeef52eb040179f3156cbd", size = 1660943 }, + { url = "https://files.pythonhosted.org/packages/d6/e1/5c7d63143b8d00c83b958b9e78e7048c4a69903c760c1e329bf02bac57a1/aiohttp-3.11.16-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ecdb8173e6c7aa09eee342ac62e193e6904923bd232e76b4157ac0bfa670609f", size = 1622797 }, + { url = "https://files.pythonhosted.org/packages/46/9e/2ac29cca2746ee8e449e73cd2fcb3d454467393ec03a269d50e49af743f1/aiohttp-3.11.16-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a6db7458ab89c7d80bc1f4e930cc9df6edee2200127cfa6f6e080cf619eddfbd", size = 1687162 }, + { url = "https://files.pythonhosted.org/packages/ad/6b/eaa6768e02edebaf37d77f4ffb74dd55f5cbcbb6a0dbf798ccec7b0ac23b/aiohttp-3.11.16-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:2540ddc83cc724b13d1838026f6a5ad178510953302a49e6d647f6e1de82bc34", size = 1718518 }, + { url = "https://files.pythonhosted.org/packages/e5/18/dda87cbad29472a51fa058d6d8257dfce168289adaeb358b86bd93af3b20/aiohttp-3.11.16-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3b4e6db8dc4879015b9955778cfb9881897339c8fab7b3676f8433f849425913", size = 1675254 }, + { url = "https://files.pythonhosted.org/packages/32/d9/d2fb08c614df401d92c12fcbc60e6e879608d5e8909ef75c5ad8d4ad8aa7/aiohttp-3.11.16-cp313-cp313-win32.whl", hash = "sha256:493910ceb2764f792db4dc6e8e4b375dae1b08f72e18e8f10f18b34ca17d0979", size = 410698 }, + { url = "https://files.pythonhosted.org/packages/ce/ed/853e36d5a33c24544cfa46585895547de152dfef0b5c79fa675f6e4b7b87/aiohttp-3.11.16-cp313-cp313-win_amd64.whl", hash = "sha256:42864e70a248f5f6a49fdaf417d9bc62d6e4d8ee9695b24c5916cb4bb666c802", size = 436395 }, +] + +[[package]] +name = "aiosignal" +version = "1.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ba/b5/6d55e80f6d8a08ce22b982eafa278d823b541c925f11ee774b0b9c43473d/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54", size = 19424 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597 }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + +[[package]] +name = "anthropic" +version = "0.54.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/89/28/80cb9bb6e7ce77d404145b51da4257455805c17f0a6be528ff3286e3882f/anthropic-0.54.0.tar.gz", hash = "sha256:5e6f997d97ce8e70eac603c3ec2e7f23addeff953fbbb76b19430562bb6ba815", size = 312376 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/b9/6ffb48e82c5e97b03cecee872d134a6b6666c2767b2d32ed709f3a60a8fe/anthropic-0.54.0-py3-none-any.whl", hash = "sha256:c1062a0a905daeec17ca9c06c401e4b3f24cb0495841d29d752568a1d4018d56", size = 288774 }, +] + +[[package]] +name = "anyio" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815 }, +] + +[[package]] +name = "audioop-lts" +version = "0.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/3b/69ff8a885e4c1c42014c2765275c4bd91fe7bc9847e9d8543dbcbb09f820/audioop_lts-0.2.1.tar.gz", hash = "sha256:e81268da0baa880431b68b1308ab7257eb33f356e57a5f9b1f915dfb13dd1387", size = 30204 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/91/a219253cc6e92db2ebeaf5cf8197f71d995df6f6b16091d1f3ce62cb169d/audioop_lts-0.2.1-cp313-abi3-macosx_10_13_universal2.whl", hash = "sha256:fd1345ae99e17e6910f47ce7d52673c6a1a70820d78b67de1b7abb3af29c426a", size = 46252 }, + { url = "https://files.pythonhosted.org/packages/ec/f6/3cb21e0accd9e112d27cee3b1477cd04dafe88675c54ad8b0d56226c1e0b/audioop_lts-0.2.1-cp313-abi3-macosx_10_13_x86_64.whl", hash = "sha256:e175350da05d2087e12cea8e72a70a1a8b14a17e92ed2022952a4419689ede5e", size = 27183 }, + { url = "https://files.pythonhosted.org/packages/ea/7e/f94c8a6a8b2571694375b4cf94d3e5e0f529e8e6ba280fad4d8c70621f27/audioop_lts-0.2.1-cp313-abi3-macosx_11_0_arm64.whl", hash = "sha256:4a8dd6a81770f6ecf019c4b6d659e000dc26571b273953cef7cd1d5ce2ff3ae6", size = 26726 }, + { url = "https://files.pythonhosted.org/packages/ef/f8/a0e8e7a033b03fae2b16bc5aa48100b461c4f3a8a38af56d5ad579924a3a/audioop_lts-0.2.1-cp313-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1cd3c0b6f2ca25c7d2b1c3adeecbe23e65689839ba73331ebc7d893fcda7ffe", size = 80718 }, + { url = "https://files.pythonhosted.org/packages/8f/ea/a98ebd4ed631c93b8b8f2368862cd8084d75c77a697248c24437c36a6f7e/audioop_lts-0.2.1-cp313-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff3f97b3372c97782e9c6d3d7fdbe83bce8f70de719605bd7ee1839cd1ab360a", size = 88326 }, + { url = "https://files.pythonhosted.org/packages/33/79/e97a9f9daac0982aa92db1199339bd393594d9a4196ad95ae088635a105f/audioop_lts-0.2.1-cp313-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a351af79edefc2a1bd2234bfd8b339935f389209943043913a919df4b0f13300", size = 80539 }, + { url = "https://files.pythonhosted.org/packages/b2/d3/1051d80e6f2d6f4773f90c07e73743a1e19fcd31af58ff4e8ef0375d3a80/audioop_lts-0.2.1-cp313-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aeb6f96f7f6da80354330470b9134d81b4cf544cdd1c549f2f45fe964d28059", size = 78577 }, + { url = "https://files.pythonhosted.org/packages/7a/1d/54f4c58bae8dc8c64a75071c7e98e105ddaca35449376fcb0180f6e3c9df/audioop_lts-0.2.1-cp313-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c589f06407e8340e81962575fcffbba1e92671879a221186c3d4662de9fe804e", size = 82074 }, + { url = "https://files.pythonhosted.org/packages/36/89/2e78daa7cebbea57e72c0e1927413be4db675548a537cfba6a19040d52fa/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fbae5d6925d7c26e712f0beda5ed69ebb40e14212c185d129b8dfbfcc335eb48", size = 84210 }, + { url = "https://files.pythonhosted.org/packages/a5/57/3ff8a74df2ec2fa6d2ae06ac86e4a27d6412dbb7d0e0d41024222744c7e0/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_i686.whl", hash = "sha256:d2d5434717f33117f29b5691fbdf142d36573d751716249a288fbb96ba26a281", size = 85664 }, + { url = "https://files.pythonhosted.org/packages/16/01/21cc4e5878f6edbc8e54be4c108d7cb9cb6202313cfe98e4ece6064580dd/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_ppc64le.whl", hash = "sha256:f626a01c0a186b08f7ff61431c01c055961ee28769591efa8800beadd27a2959", size = 93255 }, + { url = "https://files.pythonhosted.org/packages/3e/28/7f7418c362a899ac3b0bf13b1fde2d4ffccfdeb6a859abd26f2d142a1d58/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_s390x.whl", hash = "sha256:05da64e73837f88ee5c6217d732d2584cf638003ac72df124740460531e95e47", size = 87760 }, + { url = "https://files.pythonhosted.org/packages/6d/d8/577a8be87dc7dd2ba568895045cee7d32e81d85a7e44a29000fe02c4d9d4/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:56b7a0a4dba8e353436f31a932f3045d108a67b5943b30f85a5563f4d8488d77", size = 84992 }, + { url = "https://files.pythonhosted.org/packages/ef/9a/4699b0c4fcf89936d2bfb5425f55f1a8b86dff4237cfcc104946c9cd9858/audioop_lts-0.2.1-cp313-abi3-win32.whl", hash = "sha256:6e899eb8874dc2413b11926b5fb3857ec0ab55222840e38016a6ba2ea9b7d5e3", size = 26059 }, + { url = "https://files.pythonhosted.org/packages/3a/1c/1f88e9c5dd4785a547ce5fd1eb83fff832c00cc0e15c04c1119b02582d06/audioop_lts-0.2.1-cp313-abi3-win_amd64.whl", hash = "sha256:64562c5c771fb0a8b6262829b9b4f37a7b886c01b4d3ecdbae1d629717db08b4", size = 30412 }, + { url = "https://files.pythonhosted.org/packages/c4/e9/c123fd29d89a6402ad261516f848437472ccc602abb59bba522af45e281b/audioop_lts-0.2.1-cp313-abi3-win_arm64.whl", hash = "sha256:c45317debeb64002e980077642afbd977773a25fa3dfd7ed0c84dccfc1fafcb0", size = 23578 }, + { url = "https://files.pythonhosted.org/packages/7a/99/bb664a99561fd4266687e5cb8965e6ec31ba4ff7002c3fce3dc5ef2709db/audioop_lts-0.2.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:3827e3fce6fee4d69d96a3d00cd2ab07f3c0d844cb1e44e26f719b34a5b15455", size = 46827 }, + { url = "https://files.pythonhosted.org/packages/c4/e3/f664171e867e0768ab982715e744430cf323f1282eb2e11ebfb6ee4c4551/audioop_lts-0.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:161249db9343b3c9780ca92c0be0d1ccbfecdbccac6844f3d0d44b9c4a00a17f", size = 27479 }, + { url = "https://files.pythonhosted.org/packages/a6/0d/2a79231ff54eb20e83b47e7610462ad6a2bea4e113fae5aa91c6547e7764/audioop_lts-0.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5b7b4ff9de7a44e0ad2618afdc2ac920b91f4a6d3509520ee65339d4acde5abf", size = 27056 }, + { url = "https://files.pythonhosted.org/packages/86/46/342471398283bb0634f5a6df947806a423ba74b2e29e250c7ec0e3720e4f/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72e37f416adb43b0ced93419de0122b42753ee74e87070777b53c5d2241e7fab", size = 87802 }, + { url = "https://files.pythonhosted.org/packages/56/44/7a85b08d4ed55517634ff19ddfbd0af05bf8bfd39a204e4445cd0e6f0cc9/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534ce808e6bab6adb65548723c8cbe189a3379245db89b9d555c4210b4aaa9b6", size = 95016 }, + { url = "https://files.pythonhosted.org/packages/a8/2a/45edbca97ea9ee9e6bbbdb8d25613a36e16a4d1e14ae01557392f15cc8d3/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2de9b6fb8b1cf9f03990b299a9112bfdf8b86b6987003ca9e8a6c4f56d39543", size = 87394 }, + { url = "https://files.pythonhosted.org/packages/14/ae/832bcbbef2c510629593bf46739374174606e25ac7d106b08d396b74c964/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f24865991b5ed4b038add5edbf424639d1358144f4e2a3e7a84bc6ba23e35074", size = 84874 }, + { url = "https://files.pythonhosted.org/packages/26/1c/8023c3490798ed2f90dfe58ec3b26d7520a243ae9c0fc751ed3c9d8dbb69/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bdb3b7912ccd57ea53197943f1bbc67262dcf29802c4a6df79ec1c715d45a78", size = 88698 }, + { url = "https://files.pythonhosted.org/packages/2c/db/5379d953d4918278b1f04a5a64b2c112bd7aae8f81021009da0dcb77173c/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:120678b208cca1158f0a12d667af592e067f7a50df9adc4dc8f6ad8d065a93fb", size = 90401 }, + { url = "https://files.pythonhosted.org/packages/99/6e/3c45d316705ab1aec2e69543a5b5e458d0d112a93d08994347fafef03d50/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:54cd4520fc830b23c7d223693ed3e1b4d464997dd3abc7c15dce9a1f9bd76ab2", size = 91864 }, + { url = "https://files.pythonhosted.org/packages/08/58/6a371d8fed4f34debdb532c0b00942a84ebf3e7ad368e5edc26931d0e251/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:d6bd20c7a10abcb0fb3d8aaa7508c0bf3d40dfad7515c572014da4b979d3310a", size = 98796 }, + { url = "https://files.pythonhosted.org/packages/ee/77/d637aa35497e0034ff846fd3330d1db26bc6fd9dd79c406e1341188b06a2/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:f0ed1ad9bd862539ea875fb339ecb18fcc4148f8d9908f4502df28f94d23491a", size = 94116 }, + { url = "https://files.pythonhosted.org/packages/1a/60/7afc2abf46bbcf525a6ebc0305d85ab08dc2d1e2da72c48dbb35eee5b62c/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e1af3ff32b8c38a7d900382646e91f2fc515fd19dea37e9392275a5cbfdbff63", size = 91520 }, + { url = "https://files.pythonhosted.org/packages/65/6d/42d40da100be1afb661fd77c2b1c0dfab08af1540df57533621aea3db52a/audioop_lts-0.2.1-cp313-cp313t-win32.whl", hash = "sha256:f51bb55122a89f7a0817d7ac2319744b4640b5b446c4c3efcea5764ea99ae509", size = 26482 }, + { url = "https://files.pythonhosted.org/packages/01/09/f08494dca79f65212f5b273aecc5a2f96691bf3307cac29acfcf84300c01/audioop_lts-0.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f0f2f336aa2aee2bce0b0dcc32bbba9178995454c7b979cf6ce086a8801e14c7", size = 30780 }, + { url = "https://files.pythonhosted.org/packages/5d/35/be73b6015511aa0173ec595fc579133b797ad532996f2998fd6b8d1bbe6b/audioop_lts-0.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:78bfb3703388c780edf900be66e07de5a3d4105ca8e8720c5c4d67927e0b15d0", size = 23918 }, +] + +[[package]] +name = "azure-ai-documentintelligence" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "isodate" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/44/7b/8115cd713e2caa5e44def85f2b7ebd02a74ae74d7113ba20bdd41fd6dd80/azure_ai_documentintelligence-1.0.2.tar.gz", hash = "sha256:4d75a2513f2839365ebabc0e0e1772f5601b3a8c9a71e75da12440da13b63484", size = 170940 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/75/c9ec040f23082f54ffb1977ff8f364c2d21c79a640a13d1c1809e7fd6b1a/azure_ai_documentintelligence-1.0.2-py3-none-any.whl", hash = "sha256:e1fb446abbdeccc9759d897898a0fe13141ed29f9ad11fc705f951925822ed59", size = 106005 }, +] + +[[package]] +name = "azure-core" +version = "1.33.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = "six" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/75/aa/7c9db8edd626f1a7d99d09ef7926f6f4fb34d5f9fa00dc394afdfe8e2a80/azure_core-1.33.0.tar.gz", hash = "sha256:f367aa07b5e3005fec2c1e184b882b0b039910733907d001c20fb08ebb8c0eb9", size = 295633 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/b7/76b7e144aa53bd206bf1ce34fa75350472c3f69bf30e5c8c18bc9881035d/azure_core-1.33.0-py3-none-any.whl", hash = "sha256:9b5b6d0223a1d38c37500e6971118c1e0f13f54951e6893968b38910bc9cda8f", size = 207071 }, +] + +[[package]] +name = "azure-identity" +version = "1.21.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "cryptography" }, + { name = "msal" }, + { name = "msal-extensions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b5/a1/f1a683672e7a88ea0e3119f57b6c7843ed52650fdcac8bfa66ed84e86e40/azure_identity-1.21.0.tar.gz", hash = "sha256:ea22ce6e6b0f429bc1b8d9212d5b9f9877bd4c82f1724bfa910760612c07a9a6", size = 266445 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/9f/1f9f3ef4f49729ee207a712a5971a9ca747f2ca47d9cbf13cf6953e3478a/azure_identity-1.21.0-py3-none-any.whl", hash = "sha256:258ea6325537352440f71b35c3dffe9d240eae4a5126c1b7ce5efd5766bd9fd9", size = 189190 }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.13.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/e4/0c4c39e18fd76d6a628d4dd8da40543d136ce2d1752bd6eeeab0791f4d6b/beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195", size = 621067 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/cd/30110dc0ffcf3b131156077b90e9f60ed75711223f306da4db08eff8403b/beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", size = 187285 }, +] + +[[package]] +name = "certifi" +version = "2025.1.31" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 }, + { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 }, + { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 }, + { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 }, + { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 }, + { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 }, + { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 }, + { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 }, + { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 }, + { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 }, + { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 }, + { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 }, + { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 }, + { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 }, + { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 }, + { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 }, + { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 }, + { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 }, + { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 }, + { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 }, + { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 }, + { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 }, + { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 }, + { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 }, + { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 }, + { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 }, + { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, +] + +[[package]] +name = "cobble" +version = "0.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/7a/a507c709be2c96e1bb6102eb7b7f4026c5e5e223ef7d745a17d239e9d844/cobble-0.1.4.tar.gz", hash = "sha256:de38be1539992c8a06e569630717c485a5f91be2192c461ea2b220607dfa78aa", size = 3805 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/e1/3714a2f371985215c219c2a70953d38e3eed81ef165aed061d21de0e998b/cobble-0.1.4-py3-none-any.whl", hash = "sha256:36c91b1655e599fd428e2b95fdd5f0da1ca2e9f1abb0bc871dec21a0e78a2b44", size = 3984 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "coloredlogs" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "humanfriendly" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018 }, +] + +[[package]] +name = "cryptography" +version = "44.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cd/25/4ce80c78963834b8a9fd1cc1266be5ed8d1840785c0f2e1b73b8d128d505/cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0", size = 710807 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/ef/83e632cfa801b221570c5f58c0369db6fa6cef7d9ff859feab1aae1a8a0f/cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7", size = 6676361 }, + { url = "https://files.pythonhosted.org/packages/30/ec/7ea7c1e4c8fc8329506b46c6c4a52e2f20318425d48e0fe597977c71dbce/cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1", size = 3952350 }, + { url = "https://files.pythonhosted.org/packages/27/61/72e3afdb3c5ac510330feba4fc1faa0fe62e070592d6ad00c40bb69165e5/cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb", size = 4166572 }, + { url = "https://files.pythonhosted.org/packages/26/e4/ba680f0b35ed4a07d87f9e98f3ebccb05091f3bf6b5a478b943253b3bbd5/cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843", size = 3958124 }, + { url = "https://files.pythonhosted.org/packages/9c/e8/44ae3e68c8b6d1cbc59040288056df2ad7f7f03bbcaca6b503c737ab8e73/cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5", size = 3678122 }, + { url = "https://files.pythonhosted.org/packages/27/7b/664ea5e0d1eab511a10e480baf1c5d3e681c7d91718f60e149cec09edf01/cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c", size = 4191831 }, + { url = "https://files.pythonhosted.org/packages/2a/07/79554a9c40eb11345e1861f46f845fa71c9e25bf66d132e123d9feb8e7f9/cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a", size = 3960583 }, + { url = "https://files.pythonhosted.org/packages/bb/6d/858e356a49a4f0b591bd6789d821427de18432212e137290b6d8a817e9bf/cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308", size = 4191753 }, + { url = "https://files.pythonhosted.org/packages/b2/80/62df41ba4916067fa6b125aa8c14d7e9181773f0d5d0bd4dcef580d8b7c6/cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688", size = 4079550 }, + { url = "https://files.pythonhosted.org/packages/f3/cd/2558cc08f7b1bb40683f99ff4327f8dcfc7de3affc669e9065e14824511b/cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7", size = 4298367 }, + { url = "https://files.pythonhosted.org/packages/71/59/94ccc74788945bc3bd4cf355d19867e8057ff5fdbcac781b1ff95b700fb1/cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79", size = 2772843 }, + { url = "https://files.pythonhosted.org/packages/ca/2c/0d0bbaf61ba05acb32f0841853cfa33ebb7a9ab3d9ed8bb004bd39f2da6a/cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa", size = 3209057 }, + { url = "https://files.pythonhosted.org/packages/9e/be/7a26142e6d0f7683d8a382dd963745e65db895a79a280a30525ec92be890/cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3", size = 6677789 }, + { url = "https://files.pythonhosted.org/packages/06/88/638865be7198a84a7713950b1db7343391c6066a20e614f8fa286eb178ed/cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639", size = 3951919 }, + { url = "https://files.pythonhosted.org/packages/d7/fc/99fe639bcdf58561dfad1faa8a7369d1dc13f20acd78371bb97a01613585/cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd", size = 4167812 }, + { url = "https://files.pythonhosted.org/packages/53/7b/aafe60210ec93d5d7f552592a28192e51d3c6b6be449e7fd0a91399b5d07/cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181", size = 3958571 }, + { url = "https://files.pythonhosted.org/packages/16/32/051f7ce79ad5a6ef5e26a92b37f172ee2d6e1cce09931646eef8de1e9827/cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea", size = 3679832 }, + { url = "https://files.pythonhosted.org/packages/78/2b/999b2a1e1ba2206f2d3bca267d68f350beb2b048a41ea827e08ce7260098/cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699", size = 4193719 }, + { url = "https://files.pythonhosted.org/packages/72/97/430e56e39a1356e8e8f10f723211a0e256e11895ef1a135f30d7d40f2540/cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9", size = 3960852 }, + { url = "https://files.pythonhosted.org/packages/89/33/c1cf182c152e1d262cac56850939530c05ca6c8d149aa0dcee490b417e99/cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23", size = 4193906 }, + { url = "https://files.pythonhosted.org/packages/e1/99/87cf26d4f125380dc674233971069bc28d19b07f7755b29861570e513650/cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922", size = 4081572 }, + { url = "https://files.pythonhosted.org/packages/b3/9f/6a3e0391957cc0c5f84aef9fbdd763035f2b52e998a53f99345e3ac69312/cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4", size = 4298631 }, + { url = "https://files.pythonhosted.org/packages/e2/a5/5bc097adb4b6d22a24dea53c51f37e480aaec3465285c253098642696423/cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5", size = 2773792 }, + { url = "https://files.pythonhosted.org/packages/33/cf/1f7649b8b9a3543e042d3f348e398a061923ac05b507f3f4d95f11938aa9/cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6", size = 3210957 }, +] + +[[package]] +name = "defusedxml" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604 }, +] + +[[package]] +name = "deprecation" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/d3/8ae2869247df154b64c1884d7346d412fed0c49df84db635aab2d1c40e62/deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff", size = 173788 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a", size = 11178 }, +] + +[[package]] +name = "discord-py" +version = "2.5.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "audioop-lts", marker = "python_full_version >= '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7f/dd/5817c7af5e614e45cdf38cbf6c3f4597590c442822a648121a34dee7fa0f/discord_py-2.5.2.tar.gz", hash = "sha256:01cd362023bfea1a4a1d43f5280b5ef00cad2c7eba80098909f98bf28e578524", size = 1054879 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/57/a8/dc908a0fe4cd7e3950c9fa6906f7bf2e5d92d36b432f84897185e1b77138/discord_py-2.5.2-py3-none-any.whl", hash = "sha256:81f23a17c50509ffebe0668441cb80c139e74da5115305f70e27ce821361295a", size = 1155105 }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 }, +] + +[[package]] +name = "et-xmlfile" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059 }, +] + +[[package]] +name = "fastapi" +version = "0.115.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/55/ae499352d82338331ca1e28c7f4a63bfd09479b16395dce38cf50a39e2c2/fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", size = 295236 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/b3/b51f09c2ba432a576fe63758bddc81f78f0c6309d9e5c10d194313bf021e/fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d", size = 95164 }, +] + +[[package]] +name = "flatbuffers" +version = "25.2.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953 }, +] + +[[package]] +name = "frozenlist" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/f4/d744cba2da59b5c1d88823cf9e8a6c74e4659e2b27604ed973be2a0bf5ab/frozenlist-1.6.0.tar.gz", hash = "sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68", size = 42831 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/8a/289b7d0de2fbac832ea80944d809759976f661557a38bb8e77db5d9f79b7/frozenlist-1.6.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c5b9e42ace7d95bf41e19b87cec8f262c41d3510d8ad7514ab3862ea2197bfb1", size = 160193 }, + { url = "https://files.pythonhosted.org/packages/19/80/2fd17d322aec7f430549f0669f599997174f93ee17929ea5b92781ec902c/frozenlist-1.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ca9973735ce9f770d24d5484dcb42f68f135351c2fc81a7a9369e48cf2998a29", size = 123831 }, + { url = "https://files.pythonhosted.org/packages/99/06/f5812da431273f78c6543e0b2f7de67dfd65eb0a433978b2c9c63d2205e4/frozenlist-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6ac40ec76041c67b928ca8aaffba15c2b2ee3f5ae8d0cb0617b5e63ec119ca25", size = 121862 }, + { url = "https://files.pythonhosted.org/packages/d0/31/9e61c6b5fc493cf24d54881731204d27105234d09878be1a5983182cc4a5/frozenlist-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b7a8a3180dfb280eb044fdec562f9b461614c0ef21669aea6f1d3dac6ee576", size = 316361 }, + { url = "https://files.pythonhosted.org/packages/9d/55/22ca9362d4f0222324981470fd50192be200154d51509ee6eb9baa148e96/frozenlist-1.6.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c444d824e22da6c9291886d80c7d00c444981a72686e2b59d38b285617cb52c8", size = 307115 }, + { url = "https://files.pythonhosted.org/packages/ae/39/4fff42920a57794881e7bb3898dc7f5f539261711ea411b43bba3cde8b79/frozenlist-1.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb52c8166499a8150bfd38478248572c924c003cbb45fe3bcd348e5ac7c000f9", size = 322505 }, + { url = "https://files.pythonhosted.org/packages/55/f2/88c41f374c1e4cf0092a5459e5f3d6a1e17ed274c98087a76487783df90c/frozenlist-1.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b35298b2db9c2468106278537ee529719228950a5fdda686582f68f247d1dc6e", size = 322666 }, + { url = "https://files.pythonhosted.org/packages/75/51/034eeb75afdf3fd03997856195b500722c0b1a50716664cde64e28299c4b/frozenlist-1.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d108e2d070034f9d57210f22fefd22ea0d04609fc97c5f7f5a686b3471028590", size = 302119 }, + { url = "https://files.pythonhosted.org/packages/2b/a6/564ecde55ee633270a793999ef4fd1d2c2b32b5a7eec903b1012cb7c5143/frozenlist-1.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1be9111cb6756868ac242b3c2bd1f09d9aea09846e4f5c23715e7afb647103", size = 316226 }, + { url = "https://files.pythonhosted.org/packages/f1/c8/6c0682c32377f402b8a6174fb16378b683cf6379ab4d2827c580892ab3c7/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:94bb451c664415f02f07eef4ece976a2c65dcbab9c2f1705b7031a3a75349d8c", size = 312788 }, + { url = "https://files.pythonhosted.org/packages/b6/b8/10fbec38f82c5d163ca1750bfff4ede69713badf236a016781cf1f10a0f0/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d1a686d0b0949182b8faddea596f3fc11f44768d1f74d4cad70213b2e139d821", size = 325914 }, + { url = "https://files.pythonhosted.org/packages/62/ca/2bf4f3a1bd40cdedd301e6ecfdbb291080d5afc5f9ce350c0739f773d6b9/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ea8e59105d802c5a38bdbe7362822c522230b3faba2aa35c0fa1765239b7dd70", size = 305283 }, + { url = "https://files.pythonhosted.org/packages/09/64/20cc13ccf94abc2a1f482f74ad210703dc78a590d0b805af1c9aa67f76f9/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:abc4e880a9b920bc5020bf6a431a6bb40589d9bca3975c980495f63632e8382f", size = 319264 }, + { url = "https://files.pythonhosted.org/packages/20/ff/86c6a2bbe98cfc231519f5e6d712a0898488ceac804a917ce014f32e68f6/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a79713adfe28830f27a3c62f6b5406c37376c892b05ae070906f07ae4487046", size = 326482 }, + { url = "https://files.pythonhosted.org/packages/2f/da/8e381f66367d79adca245d1d71527aac774e30e291d41ef161ce2d80c38e/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a0318c2068e217a8f5e3b85e35899f5a19e97141a45bb925bb357cfe1daf770", size = 318248 }, + { url = "https://files.pythonhosted.org/packages/39/24/1a1976563fb476ab6f0fa9fefaac7616a4361dbe0461324f9fd7bf425dbe/frozenlist-1.6.0-cp312-cp312-win32.whl", hash = "sha256:853ac025092a24bb3bf09ae87f9127de9fe6e0c345614ac92536577cf956dfcc", size = 115161 }, + { url = "https://files.pythonhosted.org/packages/80/2e/fb4ed62a65f8cd66044706b1013f0010930d8cbb0729a2219561ea075434/frozenlist-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bdfe2d7e6c9281c6e55523acd6c2bf77963cb422fdc7d142fb0cb6621b66878", size = 120548 }, + { url = "https://files.pythonhosted.org/packages/6f/e5/04c7090c514d96ca00887932417f04343ab94904a56ab7f57861bf63652d/frozenlist-1.6.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d7fb014fe0fbfee3efd6a94fc635aeaa68e5e1720fe9e57357f2e2c6e1a647e", size = 158182 }, + { url = "https://files.pythonhosted.org/packages/e9/8f/60d0555c61eec855783a6356268314d204137f5e0c53b59ae2fc28938c99/frozenlist-1.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117", size = 122838 }, + { url = "https://files.pythonhosted.org/packages/5a/a7/d0ec890e3665b4b3b7c05dc80e477ed8dc2e2e77719368e78e2cd9fec9c8/frozenlist-1.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b314faa3051a6d45da196a2c495e922f987dc848e967d8cfeaee8a0328b1cd4", size = 120980 }, + { url = "https://files.pythonhosted.org/packages/cc/19/9b355a5e7a8eba903a008579964192c3e427444752f20b2144b10bb336df/frozenlist-1.6.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da62fecac21a3ee10463d153549d8db87549a5e77eefb8c91ac84bb42bb1e4e3", size = 305463 }, + { url = "https://files.pythonhosted.org/packages/9c/8d/5b4c758c2550131d66935ef2fa700ada2461c08866aef4229ae1554b93ca/frozenlist-1.6.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1eb89bf3454e2132e046f9599fbcf0a4483ed43b40f545551a39316d0201cd1", size = 297985 }, + { url = "https://files.pythonhosted.org/packages/48/2c/537ec09e032b5865715726b2d1d9813e6589b571d34d01550c7aeaad7e53/frozenlist-1.6.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18689b40cb3936acd971f663ccb8e2589c45db5e2c5f07e0ec6207664029a9c", size = 311188 }, + { url = "https://files.pythonhosted.org/packages/31/2f/1aa74b33f74d54817055de9a4961eff798f066cdc6f67591905d4fc82a84/frozenlist-1.6.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e67ddb0749ed066b1a03fba812e2dcae791dd50e5da03be50b6a14d0c1a9ee45", size = 311874 }, + { url = "https://files.pythonhosted.org/packages/bf/f0/cfec18838f13ebf4b37cfebc8649db5ea71a1b25dacd691444a10729776c/frozenlist-1.6.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc5e64626e6682638d6e44398c9baf1d6ce6bc236d40b4b57255c9d3f9761f1f", size = 291897 }, + { url = "https://files.pythonhosted.org/packages/ea/a5/deb39325cbbea6cd0a46db8ccd76150ae2fcbe60d63243d9df4a0b8c3205/frozenlist-1.6.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:437cfd39564744ae32ad5929e55b18ebd88817f9180e4cc05e7d53b75f79ce85", size = 305799 }, + { url = "https://files.pythonhosted.org/packages/78/22/6ddec55c5243a59f605e4280f10cee8c95a449f81e40117163383829c241/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62dd7df78e74d924952e2feb7357d826af8d2f307557a779d14ddf94d7311be8", size = 302804 }, + { url = "https://files.pythonhosted.org/packages/5d/b7/d9ca9bab87f28855063c4d202936800219e39db9e46f9fb004d521152623/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a66781d7e4cddcbbcfd64de3d41a61d6bdde370fc2e38623f30b2bd539e84a9f", size = 316404 }, + { url = "https://files.pythonhosted.org/packages/a6/3a/1255305db7874d0b9eddb4fe4a27469e1fb63720f1fc6d325a5118492d18/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:482fe06e9a3fffbcd41950f9d890034b4a54395c60b5e61fae875d37a699813f", size = 295572 }, + { url = "https://files.pythonhosted.org/packages/2a/f2/8d38eeee39a0e3a91b75867cc102159ecccf441deb6ddf67be96d3410b84/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e4f9373c500dfc02feea39f7a56e4f543e670212102cc2eeb51d3a99c7ffbde6", size = 307601 }, + { url = "https://files.pythonhosted.org/packages/38/04/80ec8e6b92f61ef085422d7b196822820404f940950dde5b2e367bede8bc/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e69bb81de06827147b7bfbaeb284d85219fa92d9f097e32cc73675f279d70188", size = 314232 }, + { url = "https://files.pythonhosted.org/packages/3a/58/93b41fb23e75f38f453ae92a2f987274c64637c450285577bd81c599b715/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7613d9977d2ab4a9141dde4a149f4357e4065949674c5649f920fec86ecb393e", size = 308187 }, + { url = "https://files.pythonhosted.org/packages/6a/a2/e64df5c5aa36ab3dee5a40d254f3e471bb0603c225f81664267281c46a2d/frozenlist-1.6.0-cp313-cp313-win32.whl", hash = "sha256:4def87ef6d90429f777c9d9de3961679abf938cb6b7b63d4a7eb8a268babfce4", size = 114772 }, + { url = "https://files.pythonhosted.org/packages/a0/77/fead27441e749b2d574bb73d693530d59d520d4b9e9679b8e3cb779d37f2/frozenlist-1.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:37a8a52c3dfff01515e9bbbee0e6063181362f9de3db2ccf9bc96189b557cbfd", size = 119847 }, + { url = "https://files.pythonhosted.org/packages/df/bd/cc6d934991c1e5d9cafda83dfdc52f987c7b28343686aef2e58a9cf89f20/frozenlist-1.6.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:46138f5a0773d064ff663d273b309b696293d7a7c00a0994c5c13a5078134b64", size = 174937 }, + { url = "https://files.pythonhosted.org/packages/f2/a2/daf945f335abdbfdd5993e9dc348ef4507436936ab3c26d7cfe72f4843bf/frozenlist-1.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f88bc0a2b9c2a835cb888b32246c27cdab5740059fb3688852bf91e915399b91", size = 136029 }, + { url = "https://files.pythonhosted.org/packages/51/65/4c3145f237a31247c3429e1c94c384d053f69b52110a0d04bfc8afc55fb2/frozenlist-1.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:777704c1d7655b802c7850255639672e90e81ad6fa42b99ce5ed3fbf45e338dd", size = 134831 }, + { url = "https://files.pythonhosted.org/packages/77/38/03d316507d8dea84dfb99bdd515ea245628af964b2bf57759e3c9205cc5e/frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85ef8d41764c7de0dcdaf64f733a27352248493a85a80661f3c678acd27e31f2", size = 392981 }, + { url = "https://files.pythonhosted.org/packages/37/02/46285ef9828f318ba400a51d5bb616ded38db8466836a9cfa39f3903260b/frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:da5cb36623f2b846fb25009d9d9215322318ff1c63403075f812b3b2876c8506", size = 371999 }, + { url = "https://files.pythonhosted.org/packages/0d/64/1212fea37a112c3c5c05bfb5f0a81af4836ce349e69be75af93f99644da9/frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cbb56587a16cf0fb8acd19e90ff9924979ac1431baea8681712716a8337577b0", size = 392200 }, + { url = "https://files.pythonhosted.org/packages/81/ce/9a6ea1763e3366e44a5208f76bf37c76c5da570772375e4d0be85180e588/frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6154c3ba59cda3f954c6333025369e42c3acd0c6e8b6ce31eb5c5b8116c07e0", size = 390134 }, + { url = "https://files.pythonhosted.org/packages/bc/36/939738b0b495b2c6d0c39ba51563e453232813042a8d908b8f9544296c29/frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e8246877afa3f1ae5c979fe85f567d220f86a50dc6c493b9b7d8191181ae01e", size = 365208 }, + { url = "https://files.pythonhosted.org/packages/b4/8b/939e62e93c63409949c25220d1ba8e88e3960f8ef6a8d9ede8f94b459d27/frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0f6cce16306d2e117cf9db71ab3a9e8878a28176aeaf0dbe35248d97b28d0c", size = 385548 }, + { url = "https://files.pythonhosted.org/packages/62/38/22d2873c90102e06a7c5a3a5b82ca47e393c6079413e8a75c72bff067fa8/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1b8e8cd8032ba266f91136d7105706ad57770f3522eac4a111d77ac126a25a9b", size = 391123 }, + { url = "https://files.pythonhosted.org/packages/44/78/63aaaf533ee0701549500f6d819be092c6065cb5c577edb70c09df74d5d0/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e2ada1d8515d3ea5378c018a5f6d14b4994d4036591a52ceaf1a1549dec8e1ad", size = 394199 }, + { url = "https://files.pythonhosted.org/packages/54/45/71a6b48981d429e8fbcc08454dc99c4c2639865a646d549812883e9c9dd3/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:cdb2c7f071e4026c19a3e32b93a09e59b12000751fc9b0b7758da899e657d215", size = 373854 }, + { url = "https://files.pythonhosted.org/packages/3f/f3/dbf2a5e11736ea81a66e37288bf9f881143a7822b288a992579ba1b4204d/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:03572933a1969a6d6ab509d509e5af82ef80d4a5d4e1e9f2e1cdd22c77a3f4d2", size = 395412 }, + { url = "https://files.pythonhosted.org/packages/b3/f1/c63166806b331f05104d8ea385c4acd511598568b1f3e4e8297ca54f2676/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:77effc978947548b676c54bbd6a08992759ea6f410d4987d69feea9cd0919911", size = 394936 }, + { url = "https://files.pythonhosted.org/packages/ef/ea/4f3e69e179a430473eaa1a75ff986526571215fefc6b9281cdc1f09a4eb8/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a2bda8be77660ad4089caf2223fdbd6db1858462c4b85b67fbfa22102021e497", size = 391459 }, + { url = "https://files.pythonhosted.org/packages/d3/c3/0fc2c97dea550df9afd072a37c1e95421652e3206bbeaa02378b24c2b480/frozenlist-1.6.0-cp313-cp313t-win32.whl", hash = "sha256:a4d96dc5bcdbd834ec6b0f91027817214216b5b30316494d2b1aebffb87c534f", size = 128797 }, + { url = "https://files.pythonhosted.org/packages/ae/f5/79c9320c5656b1965634fe4be9c82b12a3305bdbc58ad9cb941131107b20/frozenlist-1.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e18036cb4caa17ea151fd5f3d70be9d354c99eb8cf817a3ccde8a7873b074348", size = 134709 }, + { url = "https://files.pythonhosted.org/packages/71/3e/b04a0adda73bd52b390d730071c0d577073d3d26740ee1bad25c3ad0f37b/frozenlist-1.6.0-py3-none-any.whl", hash = "sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191", size = 12404 }, +] + +[[package]] +name = "gotrue" +version = "2.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", extra = ["http2"] }, + { name = "pydantic" }, + { name = "pyjwt" }, + { name = "pytest-mock" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/97/577c6d67f2d3687199ba7c5628af65108f346a15877c93831081ab67a341/gotrue-2.12.0.tar.gz", hash = "sha256:b9ea164ee52964d8364c550cde16dd0e9576241a4cffeaa52eca339f61d1d14b", size = 37883 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/5c/fe0dd370294c782fc1f627bb7e3eedd87c3d4d7f8d2b39fe8dd63c3096a8/gotrue-2.12.0-py3-none-any.whl", hash = "sha256:de94928eebb42d7d9672dbe4fbd0b51140a45051a31626a06dad2ad44a9a976a", size = 43649 }, +] + +[[package]] +name = "h11" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, +] + +[[package]] +name = "h2" +version = "4.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "hpack" }, + { name = "hyperframe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f", size = 2150682 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957 }, +] + +[[package]] +name = "hpack" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357 }, +] + +[[package]] +name = "httpcore" +version = "1.0.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/45/ad3e1b4d448f22c0cff4f5692f5ed0666658578e358b8d58a19846048059/httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad", size = 85385 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/8d/f052b1e336bb2c1fc7ed1aaed898aa570c0b61a09707b108979d9fc6e308/httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be", size = 78732 }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, +] + +[package.optional-dependencies] +http2 = [ + { name = "h2" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 }, +] + +[[package]] +name = "humanfriendly" +version = "10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyreadline3", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794 }, +] + +[[package]] +name = "hyperframe" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050 }, +] + +[[package]] +name = "isodate" +version = "0.7.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320 }, +] + +[[package]] +name = "jiter" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/c2/e4562507f52f0af7036da125bb699602ead37a2332af0788f8e0a3417f36/jiter-0.9.0.tar.gz", hash = "sha256:aadba0964deb424daa24492abc3d229c60c4a31bfee205aedbf1acc7639d7893", size = 162604 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/d7/c55086103d6f29b694ec79156242304adf521577530d9031317ce5338c59/jiter-0.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7b46249cfd6c48da28f89eb0be3f52d6fdb40ab88e2c66804f546674e539ec11", size = 309203 }, + { url = "https://files.pythonhosted.org/packages/b0/01/f775dfee50beb420adfd6baf58d1c4d437de41c9b666ddf127c065e5a488/jiter-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:609cf3c78852f1189894383cf0b0b977665f54cb38788e3e6b941fa6d982c00e", size = 319678 }, + { url = "https://files.pythonhosted.org/packages/ab/b8/09b73a793714726893e5d46d5c534a63709261af3d24444ad07885ce87cb/jiter-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d726a3890a54561e55a9c5faea1f7655eda7f105bd165067575ace6e65f80bb2", size = 341816 }, + { url = "https://files.pythonhosted.org/packages/35/6f/b8f89ec5398b2b0d344257138182cc090302854ed63ed9c9051e9c673441/jiter-0.9.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2e89dc075c1fef8fa9be219e249f14040270dbc507df4215c324a1839522ea75", size = 364152 }, + { url = "https://files.pythonhosted.org/packages/9b/ca/978cc3183113b8e4484cc7e210a9ad3c6614396e7abd5407ea8aa1458eef/jiter-0.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04e8ffa3c353b1bc4134f96f167a2082494351e42888dfcf06e944f2729cbe1d", size = 406991 }, + { url = "https://files.pythonhosted.org/packages/13/3a/72861883e11a36d6aa314b4922125f6ae90bdccc225cd96d24cc78a66385/jiter-0.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:203f28a72a05ae0e129b3ed1f75f56bc419d5f91dfacd057519a8bd137b00c42", size = 395824 }, + { url = "https://files.pythonhosted.org/packages/87/67/22728a86ef53589c3720225778f7c5fdb617080e3deaed58b04789418212/jiter-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fca1a02ad60ec30bb230f65bc01f611c8608b02d269f998bc29cca8619a919dc", size = 351318 }, + { url = "https://files.pythonhosted.org/packages/69/b9/f39728e2e2007276806d7a6609cda7fac44ffa28ca0d02c49a4f397cc0d9/jiter-0.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:237e5cee4d5d2659aaf91bbf8ec45052cc217d9446070699441a91b386ae27dc", size = 384591 }, + { url = "https://files.pythonhosted.org/packages/eb/8f/8a708bc7fd87b8a5d861f1c118a995eccbe6d672fe10c9753e67362d0dd0/jiter-0.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:528b6b71745e7326eed73c53d4aa57e2a522242320b6f7d65b9c5af83cf49b6e", size = 520746 }, + { url = "https://files.pythonhosted.org/packages/95/1e/65680c7488bd2365dbd2980adaf63c562d3d41d3faac192ebc7ef5b4ae25/jiter-0.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9f48e86b57bc711eb5acdfd12b6cb580a59cc9a993f6e7dcb6d8b50522dcd50d", size = 512754 }, + { url = "https://files.pythonhosted.org/packages/78/f3/fdc43547a9ee6e93c837685da704fb6da7dba311fc022e2766d5277dfde5/jiter-0.9.0-cp312-cp312-win32.whl", hash = "sha256:699edfde481e191d81f9cf6d2211debbfe4bd92f06410e7637dffb8dd5dfde06", size = 207075 }, + { url = "https://files.pythonhosted.org/packages/cd/9d/742b289016d155f49028fe1bfbeb935c9bf0ffeefdf77daf4a63a42bb72b/jiter-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:099500d07b43f61d8bd780466d429c45a7b25411b334c60ca875fa775f68ccb0", size = 207999 }, + { url = "https://files.pythonhosted.org/packages/e7/1b/4cd165c362e8f2f520fdb43245e2b414f42a255921248b4f8b9c8d871ff1/jiter-0.9.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2764891d3f3e8b18dce2cff24949153ee30c9239da7c00f032511091ba688ff7", size = 308197 }, + { url = "https://files.pythonhosted.org/packages/13/aa/7a890dfe29c84c9a82064a9fe36079c7c0309c91b70c380dc138f9bea44a/jiter-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:387b22fbfd7a62418d5212b4638026d01723761c75c1c8232a8b8c37c2f1003b", size = 318160 }, + { url = "https://files.pythonhosted.org/packages/6a/38/5888b43fc01102f733f085673c4f0be5a298f69808ec63de55051754e390/jiter-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d8da8629ccae3606c61d9184970423655fb4e33d03330bcdfe52d234d32f69", size = 341259 }, + { url = "https://files.pythonhosted.org/packages/3d/5e/bbdbb63305bcc01006de683b6228cd061458b9b7bb9b8d9bc348a58e5dc2/jiter-0.9.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1be73d8982bdc278b7b9377426a4b44ceb5c7952073dd7488e4ae96b88e1103", size = 363730 }, + { url = "https://files.pythonhosted.org/packages/75/85/53a3edc616992fe4af6814c25f91ee3b1e22f7678e979b6ea82d3bc0667e/jiter-0.9.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2228eaaaa111ec54b9e89f7481bffb3972e9059301a878d085b2b449fbbde635", size = 405126 }, + { url = "https://files.pythonhosted.org/packages/ae/b3/1ee26b12b2693bd3f0b71d3188e4e5d817b12e3c630a09e099e0a89e28fa/jiter-0.9.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11509bfecbc319459647d4ac3fd391d26fdf530dad00c13c4dadabf5b81f01a4", size = 393668 }, + { url = "https://files.pythonhosted.org/packages/11/87/e084ce261950c1861773ab534d49127d1517b629478304d328493f980791/jiter-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f22238da568be8bbd8e0650e12feeb2cfea15eda4f9fc271d3b362a4fa0604d", size = 352350 }, + { url = "https://files.pythonhosted.org/packages/f0/06/7dca84b04987e9df563610aa0bc154ea176e50358af532ab40ffb87434df/jiter-0.9.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17f5d55eb856597607562257c8e36c42bc87f16bef52ef7129b7da11afc779f3", size = 384204 }, + { url = "https://files.pythonhosted.org/packages/16/2f/82e1c6020db72f397dd070eec0c85ebc4df7c88967bc86d3ce9864148f28/jiter-0.9.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:6a99bed9fbb02f5bed416d137944419a69aa4c423e44189bc49718859ea83bc5", size = 520322 }, + { url = "https://files.pythonhosted.org/packages/36/fd/4f0cd3abe83ce208991ca61e7e5df915aa35b67f1c0633eb7cf2f2e88ec7/jiter-0.9.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e057adb0cd1bd39606100be0eafe742de2de88c79df632955b9ab53a086b3c8d", size = 512184 }, + { url = "https://files.pythonhosted.org/packages/a0/3c/8a56f6d547731a0b4410a2d9d16bf39c861046f91f57c98f7cab3d2aa9ce/jiter-0.9.0-cp313-cp313-win32.whl", hash = "sha256:f7e6850991f3940f62d387ccfa54d1a92bd4bb9f89690b53aea36b4364bcab53", size = 206504 }, + { url = "https://files.pythonhosted.org/packages/f4/1c/0c996fd90639acda75ed7fa698ee5fd7d80243057185dc2f63d4c1c9f6b9/jiter-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:c8ae3bf27cd1ac5e6e8b7a27487bf3ab5f82318211ec2e1346a5b058756361f7", size = 204943 }, + { url = "https://files.pythonhosted.org/packages/78/0f/77a63ca7aa5fed9a1b9135af57e190d905bcd3702b36aca46a01090d39ad/jiter-0.9.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0b2827fb88dda2cbecbbc3e596ef08d69bda06c6f57930aec8e79505dc17001", size = 317281 }, + { url = "https://files.pythonhosted.org/packages/f9/39/a3a1571712c2bf6ec4c657f0d66da114a63a2e32b7e4eb8e0b83295ee034/jiter-0.9.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062b756ceb1d40b0b28f326cba26cfd575a4918415b036464a52f08632731e5a", size = 350273 }, + { url = "https://files.pythonhosted.org/packages/ee/47/3729f00f35a696e68da15d64eb9283c330e776f3b5789bac7f2c0c4df209/jiter-0.9.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6f7838bc467ab7e8ef9f387bd6de195c43bad82a569c1699cb822f6609dd4cdf", size = 206867 }, +] + +[[package]] +name = "lxml" +version = "5.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/80/61/d3dc048cd6c7be6fe45b80cedcbdd4326ba4d550375f266d9f4246d0f4bc/lxml-5.3.2.tar.gz", hash = "sha256:773947d0ed809ddad824b7b14467e1a481b8976e87278ac4a730c2f7c7fcddc1", size = 3679948 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/7e/c749257a7fabc712c4df57927b0f703507f316e9f2c7e3219f8f76d36145/lxml-5.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:16b3897691ec0316a1aa3c6585f61c8b7978475587c5b16fc1d2c28d283dc1b0", size = 8193212 }, + { url = "https://files.pythonhosted.org/packages/a8/50/17e985ba162c9f1ca119f4445004b58f9e5ef559ded599b16755e9bfa260/lxml-5.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a8d4b34a0eeaf6e73169dcfd653c8d47f25f09d806c010daf074fba2db5e2d3f", size = 4451439 }, + { url = "https://files.pythonhosted.org/packages/c2/b5/4960ba0fcca6ce394ed4a2f89ee13083e7fcbe9641a91166e8e9792fedb1/lxml-5.3.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cd7a959396da425022e1e4214895b5cfe7de7035a043bcc2d11303792b67554", size = 5052146 }, + { url = "https://files.pythonhosted.org/packages/5f/d1/184b04481a5d1f5758916de087430752a7b229bddbd6c1d23405078c72bd/lxml-5.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cac5eaeec3549c5df7f8f97a5a6db6963b91639389cdd735d5a806370847732b", size = 4789082 }, + { url = "https://files.pythonhosted.org/packages/7d/75/1a19749d373e9a3d08861addccdf50c92b628c67074b22b8f3c61997cf5a/lxml-5.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29b5f7d77334877c2146e7bb8b94e4df980325fab0a8af4d524e5d43cd6f789d", size = 5312300 }, + { url = "https://files.pythonhosted.org/packages/fb/00/9d165d4060d3f347e63b219fcea5c6a3f9193e9e2868c6801e18e5379725/lxml-5.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13f3495cfec24e3d63fffd342cc8141355d1d26ee766ad388775f5c8c5ec3932", size = 4836655 }, + { url = "https://files.pythonhosted.org/packages/b8/e9/06720a33cc155966448a19677f079100517b6629a872382d22ebd25e48aa/lxml-5.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e70ad4c9658beeff99856926fd3ee5fde8b519b92c693f856007177c36eb2e30", size = 4961795 }, + { url = "https://files.pythonhosted.org/packages/2d/57/4540efab2673de2904746b37ef7f74385329afd4643ed92abcc9ec6e00ca/lxml-5.3.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:507085365783abd7879fa0a6fa55eddf4bdd06591b17a2418403bb3aff8a267d", size = 4779791 }, + { url = "https://files.pythonhosted.org/packages/99/ad/6056edf6c9f4fa1d41e6fbdae52c733a4a257fd0d7feccfa26ae051bb46f/lxml-5.3.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:5bb304f67cbf5dfa07edad904732782cbf693286b9cd85af27059c5779131050", size = 5346807 }, + { url = "https://files.pythonhosted.org/packages/a1/fa/5be91fc91a18f3f705ea5533bc2210b25d738c6b615bf1c91e71a9b2f26b/lxml-5.3.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:3d84f5c093645c21c29a4e972b84cb7cf682f707f8706484a5a0c7ff13d7a988", size = 4909213 }, + { url = "https://files.pythonhosted.org/packages/f3/74/71bb96a3b5ae36b74e0402f4fa319df5559a8538577f8c57c50f1b57dc15/lxml-5.3.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:bdc13911db524bd63f37b0103af014b7161427ada41f1b0b3c9b5b5a9c1ca927", size = 4987694 }, + { url = "https://files.pythonhosted.org/packages/08/c2/3953a68b0861b2f97234b1838769269478ccf872d8ea7a26e911238220ad/lxml-5.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ec944539543f66ebc060ae180d47e86aca0188bda9cbfadff47d86b0dc057dc", size = 4862865 }, + { url = "https://files.pythonhosted.org/packages/e0/9a/52e48f7cfd5a5e61f44a77e679880580dfb4f077af52d6ed5dd97e3356fe/lxml-5.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:59d437cc8a7f838282df5a199cf26f97ef08f1c0fbec6e84bd6f5cc2b7913f6e", size = 5423383 }, + { url = "https://files.pythonhosted.org/packages/17/67/42fe1d489e4dcc0b264bef361aef0b929fbb2b5378702471a3043bc6982c/lxml-5.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e275961adbd32e15672e14e0cc976a982075208224ce06d149c92cb43db5b93", size = 5286864 }, + { url = "https://files.pythonhosted.org/packages/29/e4/03b1d040ee3aaf2bd4e1c2061de2eae1178fe9a460d3efc1ea7ef66f6011/lxml-5.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:038aeb6937aa404480c2966b7f26f1440a14005cb0702078c173c028eca72c31", size = 5056819 }, + { url = "https://files.pythonhosted.org/packages/83/b3/e2ec8a6378e4d87da3af9de7c862bcea7ca624fc1a74b794180c82e30123/lxml-5.3.2-cp312-cp312-win32.whl", hash = "sha256:3c2c8d0fa3277147bff180e3590be67597e17d365ce94beb2efa3138a2131f71", size = 3486177 }, + { url = "https://files.pythonhosted.org/packages/d5/8a/6a08254b0bab2da9573735725caab8302a2a1c9b3818533b41568ca489be/lxml-5.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:77809fcd97dfda3f399102db1794f7280737b69830cd5c961ac87b3c5c05662d", size = 3817134 }, + { url = "https://files.pythonhosted.org/packages/19/fe/904fd1b0ba4f42ed5a144fcfff7b8913181892a6aa7aeb361ee783d441f8/lxml-5.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:77626571fb5270ceb36134765f25b665b896243529eefe840974269b083e090d", size = 8173598 }, + { url = "https://files.pythonhosted.org/packages/97/e8/5e332877b3ce4e2840507b35d6dbe1cc33b17678ece945ba48d2962f8c06/lxml-5.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:78a533375dc7aa16d0da44af3cf6e96035e484c8c6b2b2445541a5d4d3d289ee", size = 4441586 }, + { url = "https://files.pythonhosted.org/packages/de/f4/8fe2e6d8721803182fbce2325712e98f22dbc478126070e62731ec6d54a0/lxml-5.3.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6f62b2404b3f3f0744bbcabb0381c5fe186fa2a9a67ecca3603480f4846c585", size = 5038447 }, + { url = "https://files.pythonhosted.org/packages/a6/ac/fa63f86a1a4b1ba8b03599ad9e2f5212fa813223ac60bfe1155390d1cc0c/lxml-5.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea918da00091194526d40c30c4996971f09dacab032607581f8d8872db34fbf", size = 4783583 }, + { url = "https://files.pythonhosted.org/packages/1a/7a/08898541296a02c868d4acc11f31a5839d80f5b21d4a96f11d4c0fbed15e/lxml-5.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c35326f94702a7264aa0eea826a79547d3396a41ae87a70511b9f6e9667ad31c", size = 5305684 }, + { url = "https://files.pythonhosted.org/packages/0b/be/9a6d80b467771b90be762b968985d3de09e0d5886092238da65dac9c1f75/lxml-5.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3bef90af21d31c4544bc917f51e04f94ae11b43156356aff243cdd84802cbf2", size = 4830797 }, + { url = "https://files.pythonhosted.org/packages/8d/1c/493632959f83519802637f7db3be0113b6e8a4e501b31411fbf410735a75/lxml-5.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52fa7ba11a495b7cbce51573c73f638f1dcff7b3ee23697467dc063f75352a69", size = 4950302 }, + { url = "https://files.pythonhosted.org/packages/c7/13/01aa3b92a6b93253b90c061c7527261b792f5ae7724b420cded733bfd5d6/lxml-5.3.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ad131e2c4d2c3803e736bb69063382334e03648de2a6b8f56a878d700d4b557d", size = 4775247 }, + { url = "https://files.pythonhosted.org/packages/60/4a/baeb09fbf5c84809e119c9cf8e2e94acec326a9b45563bf5ae45a234973b/lxml-5.3.2-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:00a4463ca409ceacd20490a893a7e08deec7870840eff33dc3093067b559ce3e", size = 5338824 }, + { url = "https://files.pythonhosted.org/packages/69/c7/a05850f169ad783ed09740ac895e158b06d25fce4b13887a8ac92a84d61c/lxml-5.3.2-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:87e8d78205331cace2b73ac8249294c24ae3cba98220687b5b8ec5971a2267f1", size = 4899079 }, + { url = "https://files.pythonhosted.org/packages/de/48/18ca583aba5235582db0e933ed1af6540226ee9ca16c2ee2d6f504fcc34a/lxml-5.3.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bf6389133bb255e530a4f2f553f41c4dd795b1fbb6f797aea1eff308f1e11606", size = 4978041 }, + { url = "https://files.pythonhosted.org/packages/b6/55/6968ddc88554209d1dba0dca196360c629b3dfe083bc32a3370f9523a0c4/lxml-5.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b3709fc752b42fb6b6ffa2ba0a5b9871646d97d011d8f08f4d5b3ee61c7f3b2b", size = 4859761 }, + { url = "https://files.pythonhosted.org/packages/2e/52/d2d3baa1e0b7d04a729613160f1562f466fb1a0e45085a33acb0d6981a2b/lxml-5.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:abc795703d0de5d83943a4badd770fbe3d1ca16ee4ff3783d7caffc252f309ae", size = 5418209 }, + { url = "https://files.pythonhosted.org/packages/d3/50/6005b297ba5f858a113d6e81ccdb3a558b95a615772e7412d1f1cbdf22d7/lxml-5.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98050830bb6510159f65d9ad1b8aca27f07c01bb3884ba95f17319ccedc4bcf9", size = 5274231 }, + { url = "https://files.pythonhosted.org/packages/fb/33/6f40c09a5f7d7e7fcb85ef75072e53eba3fbadbf23e4991ca069ab2b1abb/lxml-5.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6ba465a91acc419c5682f8b06bcc84a424a7aa5c91c220241c6fd31de2a72bc6", size = 5051899 }, + { url = "https://files.pythonhosted.org/packages/8b/3a/673bc5c0d5fb6596ee2963dd016fdaefaed2c57ede82c7634c08cbda86c1/lxml-5.3.2-cp313-cp313-win32.whl", hash = "sha256:56a1d56d60ea1ec940f949d7a309e0bff05243f9bd337f585721605670abb1c1", size = 3485315 }, + { url = "https://files.pythonhosted.org/packages/8c/be/cab8dd33b0dbe3af5b5d4d24137218f79ea75d540f74eb7d8581195639e0/lxml-5.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:1a580dc232c33d2ad87d02c8a3069d47abbcdce974b9c9cc82a79ff603065dbe", size = 3814639 }, +] + +[[package]] +name = "magika" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "numpy" }, + { name = "onnxruntime" }, + { name = "python-dotenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/18/ea70f6abd36f455037340f12c8125918c726d08cd6e01f0b76b6884e0c38/magika-0.6.1.tar.gz", hash = "sha256:e3dd22c73936630b1cd79d0f412d6d9a53dc99ba5e3709b1ac53f56bc998e635", size = 3030234 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/be/c9f7bb9ee94abe8d344b660672001313e459c67b867b24abe32d5c80a9ce/magika-0.6.1-py3-none-any.whl", hash = "sha256:15838d2469f1394d8e9598bc7fceea1ede7f35aebe9675c6b45c6b5c48315931", size = 2968516 }, + { url = "https://files.pythonhosted.org/packages/3c/b9/016b174520e81faef5edb31b6c7a73966dc84ee33acd23a2e7b775df7ba4/magika-0.6.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:dadd036296a2e4840fd48fa0712848fe122da438e8f607dc8f19ca4663c359dc", size = 12408519 }, + { url = "https://files.pythonhosted.org/packages/02/b7/e7dfeb235823a82d676c68a748541c24db0249b854f945f6e3cec11c1b7e/magika-0.6.1-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:133c0e1a844361de86ca2dd7c530e38b324e86177d30c52e36fd82101c190b5c", size = 15089294 }, + { url = "https://files.pythonhosted.org/packages/64/f0/bec5bff0125d08c1bc3baef88beeb910121085249f67b5994ea961615b55/magika-0.6.1-py3-none-win_amd64.whl", hash = "sha256:0342b6230ea9aea7ab4b8fa92e1b46f1cc62e724d452ee8d6821a37f56738d22", size = 12378455 }, +] + +[[package]] +name = "mammoth" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cobble" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/a6/27a13ba068cf3ff764d631b8dd71dee1b33040aa8c143f66ce902b7d1da0/mammoth-1.9.0.tar.gz", hash = "sha256:74f5dae10ca240fd9b7a0e1a6deaebe0aad23bc590633ef6f5e868aa9b7042a6", size = 50906 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/ab/f8e63fcabc127c6efd68b03633c189ee799a5304fa96c036a325a2894bcb/mammoth-1.9.0-py2.py3-none-any.whl", hash = "sha256:0eea277316586f0ca65d86834aec4de5a0572c83ec54b4991f9bb520a891150f", size = 52901 }, +] + +[[package]] +name = "markdownify" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "beautifulsoup4" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/78/c48fed23c7aebc2c16049062e72de1da3220c274de59d28c942acdc9ffb2/markdownify-1.1.0.tar.gz", hash = "sha256:449c0bbbf1401c5112379619524f33b63490a8fa479456d41de9dc9e37560ebd", size = 17127 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/11/b751af7ad41b254a802cf52f7bc1fca7cabe2388132f2ce60a1a6b9b9622/markdownify-1.1.0-py3-none-any.whl", hash = "sha256:32a5a08e9af02c8a6528942224c91b933b4bd2c7d078f9012943776fc313eeef", size = 13901 }, +] + +[[package]] +name = "markitdown" +version = "0.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "beautifulsoup4" }, + { name = "charset-normalizer" }, + { name = "magika" }, + { name = "markdownify" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cb/e8/83669ba97718bbbccd4c432b763d22783df4c8218e770717151acf01e85b/markitdown-0.1.1.tar.gz", hash = "sha256:da97a55a45a3d775ea758e88a344d5cac94ee97115fb0293f99027d32c2fc3f6", size = 31475 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/8a/c1f85ee609de5d45f80d0213bebf6664f76ab406e9d57709e684a4a436ba/markitdown-0.1.1-py3-none-any.whl", hash = "sha256:98ea8c009fe174b37ef933e00f4364214e8fed35691178b8521b13604d0c4a58", size = 48230 }, +] + +[package.optional-dependencies] +all = [ + { name = "azure-ai-documentintelligence" }, + { name = "azure-identity" }, + { name = "mammoth" }, + { name = "olefile" }, + { name = "openpyxl" }, + { name = "pandas" }, + { name = "pdfminer-six" }, + { name = "pydub" }, + { name = "python-pptx" }, + { name = "speechrecognition" }, + { name = "xlrd" }, + { name = "youtube-transcript-api" }, +] + +[[package]] +name = "mcp" +version = "1.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/f2/dc2450e566eeccf92d89a00c3e813234ad58e2ba1e31d11467a09ac4f3b9/mcp-1.9.4.tar.gz", hash = "sha256:cfb0bcd1a9535b42edaef89947b9e18a8feb49362e1cc059d6e7fc636f2cb09f", size = 333294 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/fc/80e655c955137393c443842ffcc4feccab5b12fa7cb8de9ced90f90e6998/mcp-1.9.4-py3-none-any.whl", hash = "sha256:7fcf36b62936adb8e63f89346bccca1268eeca9bf6dfb562ee10b1dfbda9dac0", size = 130232 }, +] + +[[package]] +name = "mcp-clients" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "anthropic" }, + { name = "anyio" }, + { name = "discord-py" }, + { name = "fastapi" }, + { name = "httpx" }, + { name = "markitdown", extra = ["all"] }, + { name = "mcp" }, + { name = "openai" }, + { name = "python-dotenv" }, + { name = "pywa" }, + { name = "slack-bolt" }, + { name = "supabase" }, + { name = "trio" }, + { name = "uvicorn" }, + { name = "uvloop" }, +] + +[package.metadata] +requires-dist = [ + { name = "anthropic", specifier = ">=0.54.0" }, + { name = "anyio", specifier = ">=4.9.0" }, + { name = "discord-py", specifier = ">=2.3.0" }, + { name = "fastapi", specifier = ">=0.115.12" }, + { name = "httpx", specifier = ">=0.28.1" }, + { name = "markitdown", extras = ["all"] }, + { name = "mcp", specifier = ">=1.9.3" }, + { name = "openai", specifier = ">=1.72.0" }, + { name = "python-dotenv", specifier = ">=1.1.0" }, + { name = "pywa", specifier = ">=0.7.0" }, + { name = "slack-bolt", specifier = ">=1.23.0" }, + { name = "supabase", specifier = ">=2.15.0" }, + { name = "trio", specifier = ">=0.22.0" }, + { name = "uvicorn", specifier = ">=0.34.0" }, + { name = "uvloop", specifier = ">=0.17.0" }, +] + +[[package]] +name = "mpmath" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198 }, +] + +[[package]] +name = "msal" +version = "1.32.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "pyjwt", extra = ["crypto"] }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/5f/ef42ef25fba682e83a8ee326a1a788e60c25affb58d014495349e37bce50/msal-1.32.0.tar.gz", hash = "sha256:5445fe3af1da6be484991a7ab32eaa82461dc2347de105b76af92c610c3335c2", size = 149817 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/5a/2e663ef56a5d89eba962941b267ebe5be8c5ea340a9929d286e2f5fac505/msal-1.32.0-py3-none-any.whl", hash = "sha256:9dbac5384a10bbbf4dae5c7ea0d707d14e087b92c5aa4954b3feaa2d1aa0bcb7", size = 114655 }, +] + +[[package]] +name = "msal-extensions" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "msal" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583 }, +] + +[[package]] +name = "multidict" +version = "6.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/da/2c/e367dfb4c6538614a0c9453e510d75d66099edf1c4e69da1b5ce691a1931/multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec", size = 89372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/bb/3abdaf8fe40e9226ce8a2ba5ecf332461f7beec478a455d6587159f1bf92/multidict-6.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676", size = 64019 }, + { url = "https://files.pythonhosted.org/packages/7e/b5/1b2e8de8217d2e89db156625aa0fe4a6faad98972bfe07a7b8c10ef5dd6b/multidict-6.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1", size = 37925 }, + { url = "https://files.pythonhosted.org/packages/b4/e2/3ca91c112644a395c8eae017144c907d173ea910c913ff8b62549dcf0bbf/multidict-6.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a", size = 37008 }, + { url = "https://files.pythonhosted.org/packages/60/23/79bc78146c7ac8d1ac766b2770ca2e07c2816058b8a3d5da6caed8148637/multidict-6.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054", size = 224374 }, + { url = "https://files.pythonhosted.org/packages/86/35/77950ed9ebd09136003a85c1926ba42001ca5be14feb49710e4334ee199b/multidict-6.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc", size = 230869 }, + { url = "https://files.pythonhosted.org/packages/49/97/2a33c6e7d90bc116c636c14b2abab93d6521c0c052d24bfcc231cbf7f0e7/multidict-6.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07", size = 231949 }, + { url = "https://files.pythonhosted.org/packages/56/ce/e9b5d9fcf854f61d6686ada7ff64893a7a5523b2a07da6f1265eaaea5151/multidict-6.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde", size = 231032 }, + { url = "https://files.pythonhosted.org/packages/f0/ac/7ced59dcdfeddd03e601edb05adff0c66d81ed4a5160c443e44f2379eef0/multidict-6.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c", size = 223517 }, + { url = "https://files.pythonhosted.org/packages/db/e6/325ed9055ae4e085315193a1b58bdb4d7fc38ffcc1f4975cfca97d015e17/multidict-6.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae", size = 216291 }, + { url = "https://files.pythonhosted.org/packages/fa/84/eeee6d477dd9dcb7691c3bb9d08df56017f5dd15c730bcc9383dcf201cf4/multidict-6.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3", size = 228982 }, + { url = "https://files.pythonhosted.org/packages/82/94/4d1f3e74e7acf8b0c85db350e012dcc61701cd6668bc2440bb1ecb423c90/multidict-6.4.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507", size = 226823 }, + { url = "https://files.pythonhosted.org/packages/09/f0/1e54b95bda7cd01080e5732f9abb7b76ab5cc795b66605877caeb2197476/multidict-6.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427", size = 222714 }, + { url = "https://files.pythonhosted.org/packages/e7/a2/f6cbca875195bd65a3e53b37ab46486f3cc125bdeab20eefe5042afa31fb/multidict-6.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731", size = 233739 }, + { url = "https://files.pythonhosted.org/packages/79/68/9891f4d2b8569554723ddd6154375295f789dc65809826c6fb96a06314fd/multidict-6.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713", size = 230809 }, + { url = "https://files.pythonhosted.org/packages/e6/72/a7be29ba1e87e4fc5ceb44dabc7940b8005fd2436a332a23547709315f70/multidict-6.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a", size = 226934 }, + { url = "https://files.pythonhosted.org/packages/12/c1/259386a9ad6840ff7afc686da96808b503d152ac4feb3a96c651dc4f5abf/multidict-6.4.3-cp312-cp312-win32.whl", hash = "sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124", size = 35242 }, + { url = "https://files.pythonhosted.org/packages/06/24/c8fdff4f924d37225dc0c56a28b1dca10728fc2233065fafeb27b4b125be/multidict-6.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db", size = 38635 }, + { url = "https://files.pythonhosted.org/packages/6c/4b/86fd786d03915c6f49998cf10cd5fe6b6ac9e9a071cb40885d2e080fb90d/multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474", size = 63831 }, + { url = "https://files.pythonhosted.org/packages/45/05/9b51fdf7aef2563340a93be0a663acba2c428c4daeaf3960d92d53a4a930/multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd", size = 37888 }, + { url = "https://files.pythonhosted.org/packages/0b/43/53fc25394386c911822419b522181227ca450cf57fea76e6188772a1bd91/multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b", size = 36852 }, + { url = "https://files.pythonhosted.org/packages/8a/68/7b99c751e822467c94a235b810a2fd4047d4ecb91caef6b5c60116991c4b/multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3", size = 223644 }, + { url = "https://files.pythonhosted.org/packages/80/1b/d458d791e4dd0f7e92596667784fbf99e5c8ba040affe1ca04f06b93ae92/multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac", size = 230446 }, + { url = "https://files.pythonhosted.org/packages/e2/46/9793378d988905491a7806d8987862dc5a0bae8a622dd896c4008c7b226b/multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790", size = 231070 }, + { url = "https://files.pythonhosted.org/packages/a7/b8/b127d3e1f8dd2a5bf286b47b24567ae6363017292dc6dec44656e6246498/multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb", size = 229956 }, + { url = "https://files.pythonhosted.org/packages/0c/93/f70a4c35b103fcfe1443059a2bb7f66e5c35f2aea7804105ff214f566009/multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0", size = 222599 }, + { url = "https://files.pythonhosted.org/packages/63/8c/e28e0eb2fe34921d6aa32bfc4ac75b09570b4d6818cc95d25499fe08dc1d/multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9", size = 216136 }, + { url = "https://files.pythonhosted.org/packages/72/f5/fbc81f866585b05f89f99d108be5d6ad170e3b6c4d0723d1a2f6ba5fa918/multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8", size = 228139 }, + { url = "https://files.pythonhosted.org/packages/bb/ba/7d196bad6b85af2307d81f6979c36ed9665f49626f66d883d6c64d156f78/multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1", size = 226251 }, + { url = "https://files.pythonhosted.org/packages/cc/e2/fae46a370dce79d08b672422a33df721ec8b80105e0ea8d87215ff6b090d/multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817", size = 221868 }, + { url = "https://files.pythonhosted.org/packages/26/20/bbc9a3dec19d5492f54a167f08546656e7aef75d181d3d82541463450e88/multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d", size = 233106 }, + { url = "https://files.pythonhosted.org/packages/ee/8d/f30ae8f5ff7a2461177f4d8eb0d8f69f27fb6cfe276b54ec4fd5a282d918/multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9", size = 230163 }, + { url = "https://files.pythonhosted.org/packages/15/e9/2833f3c218d3c2179f3093f766940ded6b81a49d2e2f9c46ab240d23dfec/multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8", size = 225906 }, + { url = "https://files.pythonhosted.org/packages/f1/31/6edab296ac369fd286b845fa5dd4c409e63bc4655ed8c9510fcb477e9ae9/multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3", size = 35238 }, + { url = "https://files.pythonhosted.org/packages/23/57/2c0167a1bffa30d9a1383c3dab99d8caae985defc8636934b5668830d2ef/multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5", size = 38799 }, + { url = "https://files.pythonhosted.org/packages/c9/13/2ead63b9ab0d2b3080819268acb297bd66e238070aa8d42af12b08cbee1c/multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6", size = 68642 }, + { url = "https://files.pythonhosted.org/packages/85/45/f1a751e1eede30c23951e2ae274ce8fad738e8a3d5714be73e0a41b27b16/multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c", size = 40028 }, + { url = "https://files.pythonhosted.org/packages/a7/29/fcc53e886a2cc5595cc4560df333cb9630257bda65003a7eb4e4e0d8f9c1/multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756", size = 39424 }, + { url = "https://files.pythonhosted.org/packages/f6/f0/056c81119d8b88703971f937b371795cab1407cd3c751482de5bfe1a04a9/multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375", size = 226178 }, + { url = "https://files.pythonhosted.org/packages/a3/79/3b7e5fea0aa80583d3a69c9d98b7913dfd4fbc341fb10bb2fb48d35a9c21/multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be", size = 222617 }, + { url = "https://files.pythonhosted.org/packages/06/db/3ed012b163e376fc461e1d6a67de69b408339bc31dc83d39ae9ec3bf9578/multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea", size = 227919 }, + { url = "https://files.pythonhosted.org/packages/b1/db/0433c104bca380989bc04d3b841fc83e95ce0c89f680e9ea4251118b52b6/multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8", size = 226097 }, + { url = "https://files.pythonhosted.org/packages/c2/95/910db2618175724dd254b7ae635b6cd8d2947a8b76b0376de7b96d814dab/multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02", size = 220706 }, + { url = "https://files.pythonhosted.org/packages/d1/af/aa176c6f5f1d901aac957d5258d5e22897fe13948d1e69063ae3d5d0ca01/multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124", size = 211728 }, + { url = "https://files.pythonhosted.org/packages/e7/42/d51cc5fc1527c3717d7f85137d6c79bb7a93cd214c26f1fc57523774dbb5/multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44", size = 226276 }, + { url = "https://files.pythonhosted.org/packages/28/6b/d836dea45e0b8432343ba4acf9a8ecaa245da4c0960fb7ab45088a5e568a/multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b", size = 212069 }, + { url = "https://files.pythonhosted.org/packages/55/34/0ee1a7adb3560e18ee9289c6e5f7db54edc312b13e5c8263e88ea373d12c/multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504", size = 217858 }, + { url = "https://files.pythonhosted.org/packages/04/08/586d652c2f5acefe0cf4e658eedb4d71d4ba6dfd4f189bd81b400fc1bc6b/multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf", size = 226988 }, + { url = "https://files.pythonhosted.org/packages/82/e3/cc59c7e2bc49d7f906fb4ffb6d9c3a3cf21b9f2dd9c96d05bef89c2b1fd1/multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4", size = 220435 }, + { url = "https://files.pythonhosted.org/packages/e0/32/5c3a556118aca9981d883f38c4b1bfae646f3627157f70f4068e5a648955/multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4", size = 221494 }, + { url = "https://files.pythonhosted.org/packages/b9/3b/1599631f59024b75c4d6e3069f4502409970a336647502aaf6b62fb7ac98/multidict-6.4.3-cp313-cp313t-win32.whl", hash = "sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5", size = 41775 }, + { url = "https://files.pythonhosted.org/packages/e8/4e/09301668d675d02ca8e8e1a3e6be046619e30403f5ada2ed5b080ae28d02/multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208", size = 45946 }, + { url = "https://files.pythonhosted.org/packages/96/10/7d526c8974f017f1e7ca584c71ee62a638e9334d8d33f27d7cdfc9ae79e4/multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9", size = 10400 }, +] + +[[package]] +name = "numpy" +version = "2.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/78/31103410a57bc2c2b93a3597340a8119588571f6a4539067546cb9a0bfac/numpy-2.2.4.tar.gz", hash = "sha256:9ba03692a45d3eef66559efe1d1096c4b9b75c0986b5dff5530c378fb8331d4f", size = 20270701 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/30/182db21d4f2a95904cec1a6f779479ea1ac07c0647f064dea454ec650c42/numpy-2.2.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a7b9084668aa0f64e64bd00d27ba5146ef1c3a8835f3bd912e7a9e01326804c4", size = 20947156 }, + { url = "https://files.pythonhosted.org/packages/24/6d/9483566acfbda6c62c6bc74b6e981c777229d2af93c8eb2469b26ac1b7bc/numpy-2.2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dbe512c511956b893d2dacd007d955a3f03d555ae05cfa3ff1c1ff6df8851854", size = 14133092 }, + { url = "https://files.pythonhosted.org/packages/27/f6/dba8a258acbf9d2bed2525cdcbb9493ef9bae5199d7a9cb92ee7e9b2aea6/numpy-2.2.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bb649f8b207ab07caebba230d851b579a3c8711a851d29efe15008e31bb4de24", size = 5163515 }, + { url = "https://files.pythonhosted.org/packages/62/30/82116199d1c249446723c68f2c9da40d7f062551036f50b8c4caa42ae252/numpy-2.2.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:f34dc300df798742b3d06515aa2a0aee20941c13579d7a2f2e10af01ae4901ee", size = 6696558 }, + { url = "https://files.pythonhosted.org/packages/0e/b2/54122b3c6df5df3e87582b2e9430f1bdb63af4023c739ba300164c9ae503/numpy-2.2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3f7ac96b16955634e223b579a3e5798df59007ca43e8d451a0e6a50f6bfdfba", size = 14084742 }, + { url = "https://files.pythonhosted.org/packages/02/e2/e2cbb8d634151aab9528ef7b8bab52ee4ab10e076509285602c2a3a686e0/numpy-2.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f92084defa704deadd4e0a5ab1dc52d8ac9e8a8ef617f3fbb853e79b0ea3592", size = 16134051 }, + { url = "https://files.pythonhosted.org/packages/8e/21/efd47800e4affc993e8be50c1b768de038363dd88865920439ef7b422c60/numpy-2.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4e84a6283b36632e2a5b56e121961f6542ab886bc9e12f8f9818b3c266bfbb", size = 15578972 }, + { url = "https://files.pythonhosted.org/packages/04/1e/f8bb88f6157045dd5d9b27ccf433d016981032690969aa5c19e332b138c0/numpy-2.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:11c43995255eb4127115956495f43e9343736edb7fcdb0d973defd9de14cd84f", size = 17898106 }, + { url = "https://files.pythonhosted.org/packages/2b/93/df59a5a3897c1f036ae8ff845e45f4081bb06943039ae28a3c1c7c780f22/numpy-2.2.4-cp312-cp312-win32.whl", hash = "sha256:65ef3468b53269eb5fdb3a5c09508c032b793da03251d5f8722b1194f1790c00", size = 6311190 }, + { url = "https://files.pythonhosted.org/packages/46/69/8c4f928741c2a8efa255fdc7e9097527c6dc4e4df147e3cadc5d9357ce85/numpy-2.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:2aad3c17ed2ff455b8eaafe06bcdae0062a1db77cb99f4b9cbb5f4ecb13c5146", size = 12644305 }, + { url = "https://files.pythonhosted.org/packages/2a/d0/bd5ad792e78017f5decfb2ecc947422a3669a34f775679a76317af671ffc/numpy-2.2.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cf4e5c6a278d620dee9ddeb487dc6a860f9b199eadeecc567f777daace1e9e7", size = 20933623 }, + { url = "https://files.pythonhosted.org/packages/c3/bc/2b3545766337b95409868f8e62053135bdc7fa2ce630aba983a2aa60b559/numpy-2.2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1974afec0b479e50438fc3648974268f972e2d908ddb6d7fb634598cdb8260a0", size = 14148681 }, + { url = "https://files.pythonhosted.org/packages/6a/70/67b24d68a56551d43a6ec9fe8c5f91b526d4c1a46a6387b956bf2d64744e/numpy-2.2.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:79bd5f0a02aa16808fcbc79a9a376a147cc1045f7dfe44c6e7d53fa8b8a79392", size = 5148759 }, + { url = "https://files.pythonhosted.org/packages/1c/8b/e2fc8a75fcb7be12d90b31477c9356c0cbb44abce7ffb36be39a0017afad/numpy-2.2.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:3387dd7232804b341165cedcb90694565a6015433ee076c6754775e85d86f1fc", size = 6683092 }, + { url = "https://files.pythonhosted.org/packages/13/73/41b7b27f169ecf368b52533edb72e56a133f9e86256e809e169362553b49/numpy-2.2.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f527d8fdb0286fd2fd97a2a96c6be17ba4232da346931d967a0630050dfd298", size = 14081422 }, + { url = "https://files.pythonhosted.org/packages/4b/04/e208ff3ae3ddfbafc05910f89546382f15a3f10186b1f56bd99f159689c2/numpy-2.2.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bce43e386c16898b91e162e5baaad90c4b06f9dcbe36282490032cec98dc8ae7", size = 16132202 }, + { url = "https://files.pythonhosted.org/packages/fe/bc/2218160574d862d5e55f803d88ddcad88beff94791f9c5f86d67bd8fbf1c/numpy-2.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:31504f970f563d99f71a3512d0c01a645b692b12a63630d6aafa0939e52361e6", size = 15573131 }, + { url = "https://files.pythonhosted.org/packages/a5/78/97c775bc4f05abc8a8426436b7cb1be806a02a2994b195945600855e3a25/numpy-2.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:81413336ef121a6ba746892fad881a83351ee3e1e4011f52e97fba79233611fd", size = 17894270 }, + { url = "https://files.pythonhosted.org/packages/b9/eb/38c06217a5f6de27dcb41524ca95a44e395e6a1decdc0c99fec0832ce6ae/numpy-2.2.4-cp313-cp313-win32.whl", hash = "sha256:f486038e44caa08dbd97275a9a35a283a8f1d2f0ee60ac260a1790e76660833c", size = 6308141 }, + { url = "https://files.pythonhosted.org/packages/52/17/d0dd10ab6d125c6d11ffb6dfa3423c3571befab8358d4f85cd4471964fcd/numpy-2.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:207a2b8441cc8b6a2a78c9ddc64d00d20c303d79fba08c577752f080c4007ee3", size = 12636885 }, + { url = "https://files.pythonhosted.org/packages/fa/e2/793288ede17a0fdc921172916efb40f3cbc2aa97e76c5c84aba6dc7e8747/numpy-2.2.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8120575cb4882318c791f839a4fd66161a6fa46f3f0a5e613071aae35b5dd8f8", size = 20961829 }, + { url = "https://files.pythonhosted.org/packages/3a/75/bb4573f6c462afd1ea5cbedcc362fe3e9bdbcc57aefd37c681be1155fbaa/numpy-2.2.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a761ba0fa886a7bb33c6c8f6f20213735cb19642c580a931c625ee377ee8bd39", size = 14161419 }, + { url = "https://files.pythonhosted.org/packages/03/68/07b4cd01090ca46c7a336958b413cdbe75002286295f2addea767b7f16c9/numpy-2.2.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:ac0280f1ba4a4bfff363a99a6aceed4f8e123f8a9b234c89140f5e894e452ecd", size = 5196414 }, + { url = "https://files.pythonhosted.org/packages/a5/fd/d4a29478d622fedff5c4b4b4cedfc37a00691079623c0575978d2446db9e/numpy-2.2.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:879cf3a9a2b53a4672a168c21375166171bc3932b7e21f622201811c43cdd3b0", size = 6709379 }, + { url = "https://files.pythonhosted.org/packages/41/78/96dddb75bb9be730b87c72f30ffdd62611aba234e4e460576a068c98eff6/numpy-2.2.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f05d4198c1bacc9124018109c5fba2f3201dbe7ab6e92ff100494f236209c960", size = 14051725 }, + { url = "https://files.pythonhosted.org/packages/00/06/5306b8199bffac2a29d9119c11f457f6c7d41115a335b78d3f86fad4dbe8/numpy-2.2.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2f085ce2e813a50dfd0e01fbfc0c12bbe5d2063d99f8b29da30e544fb6483b8", size = 16101638 }, + { url = "https://files.pythonhosted.org/packages/fa/03/74c5b631ee1ded596945c12027649e6344614144369fd3ec1aaced782882/numpy-2.2.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:92bda934a791c01d6d9d8e038363c50918ef7c40601552a58ac84c9613a665bc", size = 15571717 }, + { url = "https://files.pythonhosted.org/packages/cb/dc/4fc7c0283abe0981e3b89f9b332a134e237dd476b0c018e1e21083310c31/numpy-2.2.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ee4d528022f4c5ff67332469e10efe06a267e32f4067dc76bb7e2cddf3cd25ff", size = 17879998 }, + { url = "https://files.pythonhosted.org/packages/e5/2b/878576190c5cfa29ed896b518cc516aecc7c98a919e20706c12480465f43/numpy-2.2.4-cp313-cp313t-win32.whl", hash = "sha256:05c076d531e9998e7e694c36e8b349969c56eadd2cdcd07242958489d79a7286", size = 6366896 }, + { url = "https://files.pythonhosted.org/packages/3e/05/eb7eec66b95cf697f08c754ef26c3549d03ebd682819f794cb039574a0a6/numpy-2.2.4-cp313-cp313t-win_amd64.whl", hash = "sha256:188dcbca89834cc2e14eb2f106c96d6d46f200fe0200310fc29089657379c58d", size = 12739119 }, +] + +[[package]] +name = "olefile" +version = "0.47" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/1b/077b508e3e500e1629d366249c3ccb32f95e50258b231705c09e3c7a4366/olefile-0.47.zip", hash = "sha256:599383381a0bf3dfbd932ca0ca6515acd174ed48870cbf7fee123d698c192c1c", size = 112240 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/d3/b64c356a907242d719fc668b71befd73324e47ab46c8ebbbede252c154b2/olefile-0.47-py2.py3-none-any.whl", hash = "sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f", size = 114565 }, +] + +[[package]] +name = "onnxruntime" +version = "1.21.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coloredlogs" }, + { name = "flatbuffers" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "protobuf" }, + { name = "sympy" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/42/274438bbc259439fa1606d0d6d2eef4171cdbd2d7a1c3b249b4ba440424b/onnxruntime-1.21.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:f615c05869a523a94d0a4de1f0936d0199a473cf104d630fc26174bebd5759bd", size = 33658457 }, + { url = "https://files.pythonhosted.org/packages/9c/93/76f629d4f22571b0b3a29a9d375204faae2bd2b07d557043b56df5848779/onnxruntime-1.21.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79dfb1f47386c4edd115b21015354b2f05f5566c40c98606251f15a64add3cbe", size = 14164881 }, + { url = "https://files.pythonhosted.org/packages/1b/86/75cbaa4058758fa8ef912dfebba2d5a4e4fd6738615c15b6a2262d076198/onnxruntime-1.21.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2742935d6610fe0f58e1995018d9db7e8239d0201d9ebbdb7964a61386b5390a", size = 16019966 }, + { url = "https://files.pythonhosted.org/packages/5f/9d/fb8895b2cb38c9965d4b4e0a9aa1398f3e3f16c4acb75cf3b61689780a65/onnxruntime-1.21.1-cp312-cp312-win_amd64.whl", hash = "sha256:a7afdb3fcb162f5536225e13c2b245018068964b1d0eee05303ea6823ca6785e", size = 12302925 }, + { url = "https://files.pythonhosted.org/packages/6d/7e/8445eb44ba9fe0ce0bc77c4b569d79f7e3efd6da2dd87c5a04347e6c134e/onnxruntime-1.21.1-cp313-cp313-macosx_13_0_universal2.whl", hash = "sha256:ed4f9771233a92edcab9f11f537702371d450fe6cd79a727b672d37b9dab0cde", size = 33658643 }, + { url = "https://files.pythonhosted.org/packages/ce/46/9c4026d302f1c7e8427bf9fa3da2d7526d9c5200242bde6adee7928ef1c9/onnxruntime-1.21.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bc100fd1f4f95258e7d0f7068ec69dec2a47cc693f745eec9cf4561ee8d952a", size = 14165205 }, + { url = "https://files.pythonhosted.org/packages/44/b2/4e4c6b5c03be752d74cb20937961c76f53fe87a9760d5b7345629d35bb31/onnxruntime-1.21.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0fea0d2b98eecf4bebe01f7ce9a265a5d72b3050e9098063bfe65fa2b0633a8e", size = 16019529 }, + { url = "https://files.pythonhosted.org/packages/ec/1d/afca646af339cc6735f3fb7fafb9ca94b578c5b6a0ebd63a312468767bdb/onnxruntime-1.21.1-cp313-cp313-win_amd64.whl", hash = "sha256:da606061b9ed1b05b63a37be38c2014679a3e725903f58036ffd626df45c0e47", size = 12303603 }, + { url = "https://files.pythonhosted.org/packages/a5/12/a01e38c9a6b8d7c28e04d9eb83ad9143d568b961474ba49f0f18a3eeec82/onnxruntime-1.21.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94674315d40d521952bfc28007ce9b6728e87753e1f18d243c8cd953f25903b8", size = 14176329 }, + { url = "https://files.pythonhosted.org/packages/3a/72/5ff85c540fd6a465610ce47e4cee8fccb472952fc1d589112f51ae2520a5/onnxruntime-1.21.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5c9e4571ff5b2a5d377d414bc85cd9450ba233a9a92f766493874f1093976453", size = 15990556 }, +] + +[[package]] +name = "openai" +version = "1.75.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/b1/318f5d4c482f19c5fcbcde190801bfaaaec23413cda0b88a29f6897448ff/openai-1.75.0.tar.gz", hash = "sha256:fb3ea907efbdb1bcfd0c44507ad9c961afd7dce3147292b54505ecfd17be8fd1", size = 429492 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/9a/f34f163294345f123673ed03e77c33dee2534f3ac1f9d18120384457304d/openai-1.75.0-py3-none-any.whl", hash = "sha256:fe6f932d2ded3b429ff67cc9ad118c71327db32eb9d32dd723de3acfca337125", size = 646972 }, +] + +[[package]] +name = "openpyxl" +version = "3.1.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "et-xmlfile" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/f9/88d94a75de065ea32619465d2f77b29a0469500e99012523b91cc4141cd1/openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050", size = 186464 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910 }, +] + +[[package]] +name = "outcome" +version = "1.3.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/df/77698abfac98571e65ffeb0c1fba8ffd692ab8458d617a0eed7d9a8d38f2/outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8", size = 21060 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/8b/5ab7257531a5d830fc8000c476e63c935488d74609b50f9384a643ec0a62/outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b", size = 10692 }, +] + +[[package]] +name = "packaging" +version = "24.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, +] + +[[package]] +name = "pandas" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893 }, + { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475 }, + { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645 }, + { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445 }, + { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235 }, + { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756 }, + { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248 }, + { url = "https://files.pythonhosted.org/packages/64/22/3b8f4e0ed70644e85cfdcd57454686b9057c6c38d2f74fe4b8bc2527214a/pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015", size = 12477643 }, + { url = "https://files.pythonhosted.org/packages/e4/93/b3f5d1838500e22c8d793625da672f3eec046b1a99257666c94446969282/pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28", size = 11281573 }, + { url = "https://files.pythonhosted.org/packages/f5/94/6c79b07f0e5aab1dcfa35a75f4817f5c4f677931d4234afcd75f0e6a66ca/pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0", size = 15196085 }, + { url = "https://files.pythonhosted.org/packages/e8/31/aa8da88ca0eadbabd0a639788a6da13bb2ff6edbbb9f29aa786450a30a91/pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24", size = 12711809 }, + { url = "https://files.pythonhosted.org/packages/ee/7c/c6dbdb0cb2a4344cacfb8de1c5808ca885b2e4dcfde8008266608f9372af/pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659", size = 16356316 }, + { url = "https://files.pythonhosted.org/packages/57/b7/8b757e7d92023b832869fa8881a992696a0bfe2e26f72c9ae9f255988d42/pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb", size = 14022055 }, + { url = "https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d", size = 11481175 }, + { url = "https://files.pythonhosted.org/packages/76/a3/a5d88146815e972d40d19247b2c162e88213ef51c7c25993942c39dbf41d/pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468", size = 12615650 }, + { url = "https://files.pythonhosted.org/packages/9c/8c/f0fd18f6140ddafc0c24122c8a964e48294acc579d47def376fef12bcb4a/pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18", size = 11290177 }, + { url = "https://files.pythonhosted.org/packages/ed/f9/e995754eab9c0f14c6777401f7eece0943840b7a9fc932221c19d1abee9f/pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2", size = 14651526 }, + { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013 }, + { url = "https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620 }, + { url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436 }, +] + +[[package]] +name = "pdfminer-six" +version = "20250416" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "charset-normalizer" }, + { name = "cryptography" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/27/1a99ce4cfce829bb91040f82a53f33b33fec4e070d2b9c1b45f6796cd8dc/pdfminer_six-20250416.tar.gz", hash = "sha256:30956a85f9d0add806a4e460ed0d67c2b6a48b53323c7ac87de23174596d3acd", size = 7384630 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/32/89749ba23e5020e89fb584c1b39d7da6d7c56a9048307de8a88eec79e2d3/pdfminer_six-20250416-py3-none-any.whl", hash = "sha256:dd2a9ad7bc7dd6b62d009aaa9c101ac9d069a47937724569c375a6a9078da303", size = 5619271 }, +] + +[[package]] +name = "pillow" +version = "11.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/cb/bb5c01fcd2a69335b86c22142b2bccfc3464087efb7fd382eee5ffc7fdf7/pillow-11.2.1.tar.gz", hash = "sha256:a64dd61998416367b7ef979b73d3a85853ba9bec4c2925f74e588879a58716b6", size = 47026707 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/40/052610b15a1b8961f52537cc8326ca6a881408bc2bdad0d852edeb6ed33b/pillow-11.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:78afba22027b4accef10dbd5eed84425930ba41b3ea0a86fa8d20baaf19d807f", size = 3190185 }, + { url = "https://files.pythonhosted.org/packages/e5/7e/b86dbd35a5f938632093dc40d1682874c33dcfe832558fc80ca56bfcb774/pillow-11.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78092232a4ab376a35d68c4e6d5e00dfd73454bd12b230420025fbe178ee3b0b", size = 3030306 }, + { url = "https://files.pythonhosted.org/packages/a4/5c/467a161f9ed53e5eab51a42923c33051bf8d1a2af4626ac04f5166e58e0c/pillow-11.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a5f306095c6780c52e6bbb6109624b95c5b18e40aab1c3041da3e9e0cd3e2d", size = 4416121 }, + { url = "https://files.pythonhosted.org/packages/62/73/972b7742e38ae0e2ac76ab137ca6005dcf877480da0d9d61d93b613065b4/pillow-11.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c7b29dbd4281923a2bfe562acb734cee96bbb129e96e6972d315ed9f232bef4", size = 4501707 }, + { url = "https://files.pythonhosted.org/packages/e4/3a/427e4cb0b9e177efbc1a84798ed20498c4f233abde003c06d2650a6d60cb/pillow-11.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e645b020f3209a0181a418bffe7b4a93171eef6c4ef6cc20980b30bebf17b7d", size = 4522921 }, + { url = "https://files.pythonhosted.org/packages/fe/7c/d8b1330458e4d2f3f45d9508796d7caf0c0d3764c00c823d10f6f1a3b76d/pillow-11.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2dbea1012ccb784a65349f57bbc93730b96e85b42e9bf7b01ef40443db720b4", size = 4612523 }, + { url = "https://files.pythonhosted.org/packages/b3/2f/65738384e0b1acf451de5a573d8153fe84103772d139e1e0bdf1596be2ea/pillow-11.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3104c57bbd72948d75f6a9389e6727d2ab6333c3617f0a89d72d4940aa0443", size = 4587836 }, + { url = "https://files.pythonhosted.org/packages/6a/c5/e795c9f2ddf3debb2dedd0df889f2fe4b053308bb59a3cc02a0cd144d641/pillow-11.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:598174aef4589af795f66f9caab87ba4ff860ce08cd5bb447c6fc553ffee603c", size = 4669390 }, + { url = "https://files.pythonhosted.org/packages/96/ae/ca0099a3995976a9fce2f423166f7bff9b12244afdc7520f6ed38911539a/pillow-11.2.1-cp312-cp312-win32.whl", hash = "sha256:1d535df14716e7f8776b9e7fee118576d65572b4aad3ed639be9e4fa88a1cad3", size = 2332309 }, + { url = "https://files.pythonhosted.org/packages/7c/18/24bff2ad716257fc03da964c5e8f05d9790a779a8895d6566e493ccf0189/pillow-11.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:14e33b28bf17c7a38eede290f77db7c664e4eb01f7869e37fa98a5aa95978941", size = 2676768 }, + { url = "https://files.pythonhosted.org/packages/da/bb/e8d656c9543276517ee40184aaa39dcb41e683bca121022f9323ae11b39d/pillow-11.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:21e1470ac9e5739ff880c211fc3af01e3ae505859392bf65458c224d0bf283eb", size = 2415087 }, + { url = "https://files.pythonhosted.org/packages/36/9c/447528ee3776e7ab8897fe33697a7ff3f0475bb490c5ac1456a03dc57956/pillow-11.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fdec757fea0b793056419bca3e9932eb2b0ceec90ef4813ea4c1e072c389eb28", size = 3190098 }, + { url = "https://files.pythonhosted.org/packages/b5/09/29d5cd052f7566a63e5b506fac9c60526e9ecc553825551333e1e18a4858/pillow-11.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0e130705d568e2f43a17bcbe74d90958e8a16263868a12c3e0d9c8162690830", size = 3030166 }, + { url = "https://files.pythonhosted.org/packages/71/5d/446ee132ad35e7600652133f9c2840b4799bbd8e4adba881284860da0a36/pillow-11.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bdb5e09068332578214cadd9c05e3d64d99e0e87591be22a324bdbc18925be0", size = 4408674 }, + { url = "https://files.pythonhosted.org/packages/69/5f/cbe509c0ddf91cc3a03bbacf40e5c2339c4912d16458fcb797bb47bcb269/pillow-11.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d189ba1bebfbc0c0e529159631ec72bb9e9bc041f01ec6d3233d6d82eb823bc1", size = 4496005 }, + { url = "https://files.pythonhosted.org/packages/f9/b3/dd4338d8fb8a5f312021f2977fb8198a1184893f9b00b02b75d565c33b51/pillow-11.2.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:191955c55d8a712fab8934a42bfefbf99dd0b5875078240943f913bb66d46d9f", size = 4518707 }, + { url = "https://files.pythonhosted.org/packages/13/eb/2552ecebc0b887f539111c2cd241f538b8ff5891b8903dfe672e997529be/pillow-11.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:ad275964d52e2243430472fc5d2c2334b4fc3ff9c16cb0a19254e25efa03a155", size = 4610008 }, + { url = "https://files.pythonhosted.org/packages/72/d1/924ce51bea494cb6e7959522d69d7b1c7e74f6821d84c63c3dc430cbbf3b/pillow-11.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:750f96efe0597382660d8b53e90dd1dd44568a8edb51cb7f9d5d918b80d4de14", size = 4585420 }, + { url = "https://files.pythonhosted.org/packages/43/ab/8f81312d255d713b99ca37479a4cb4b0f48195e530cdc1611990eb8fd04b/pillow-11.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fe15238d3798788d00716637b3d4e7bb6bde18b26e5d08335a96e88564a36b6b", size = 4667655 }, + { url = "https://files.pythonhosted.org/packages/94/86/8f2e9d2dc3d308dfd137a07fe1cc478df0a23d42a6c4093b087e738e4827/pillow-11.2.1-cp313-cp313-win32.whl", hash = "sha256:3fe735ced9a607fee4f481423a9c36701a39719252a9bb251679635f99d0f7d2", size = 2332329 }, + { url = "https://files.pythonhosted.org/packages/6d/ec/1179083b8d6067a613e4d595359b5fdea65d0a3b7ad623fee906e1b3c4d2/pillow-11.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:74ee3d7ecb3f3c05459ba95eed5efa28d6092d751ce9bf20e3e253a4e497e691", size = 2676388 }, + { url = "https://files.pythonhosted.org/packages/23/f1/2fc1e1e294de897df39fa8622d829b8828ddad938b0eaea256d65b84dd72/pillow-11.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:5119225c622403afb4b44bad4c1ca6c1f98eed79db8d3bc6e4e160fc6339d66c", size = 2414950 }, + { url = "https://files.pythonhosted.org/packages/c4/3e/c328c48b3f0ead7bab765a84b4977acb29f101d10e4ef57a5e3400447c03/pillow-11.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8ce2e8411c7aaef53e6bb29fe98f28cd4fbd9a1d9be2eeea434331aac0536b22", size = 3192759 }, + { url = "https://files.pythonhosted.org/packages/18/0e/1c68532d833fc8b9f404d3a642991441d9058eccd5606eab31617f29b6d4/pillow-11.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9ee66787e095127116d91dea2143db65c7bb1e232f617aa5957c0d9d2a3f23a7", size = 3033284 }, + { url = "https://files.pythonhosted.org/packages/b7/cb/6faf3fb1e7705fd2db74e070f3bf6f88693601b0ed8e81049a8266de4754/pillow-11.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9622e3b6c1d8b551b6e6f21873bdcc55762b4b2126633014cea1803368a9aa16", size = 4445826 }, + { url = "https://files.pythonhosted.org/packages/07/94/8be03d50b70ca47fb434a358919d6a8d6580f282bbb7af7e4aa40103461d/pillow-11.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63b5dff3a68f371ea06025a1a6966c9a1e1ee452fc8020c2cd0ea41b83e9037b", size = 4527329 }, + { url = "https://files.pythonhosted.org/packages/fd/a4/bfe78777076dc405e3bd2080bc32da5ab3945b5a25dc5d8acaa9de64a162/pillow-11.2.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:31df6e2d3d8fc99f993fd253e97fae451a8db2e7207acf97859732273e108406", size = 4549049 }, + { url = "https://files.pythonhosted.org/packages/65/4d/eaf9068dc687c24979e977ce5677e253624bd8b616b286f543f0c1b91662/pillow-11.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:062b7a42d672c45a70fa1f8b43d1d38ff76b63421cbbe7f88146b39e8a558d91", size = 4635408 }, + { url = "https://files.pythonhosted.org/packages/1d/26/0fd443365d9c63bc79feb219f97d935cd4b93af28353cba78d8e77b61719/pillow-11.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4eb92eca2711ef8be42fd3f67533765d9fd043b8c80db204f16c8ea62ee1a751", size = 4614863 }, + { url = "https://files.pythonhosted.org/packages/49/65/dca4d2506be482c2c6641cacdba5c602bc76d8ceb618fd37de855653a419/pillow-11.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f91ebf30830a48c825590aede79376cb40f110b387c17ee9bd59932c961044f9", size = 4692938 }, + { url = "https://files.pythonhosted.org/packages/b3/92/1ca0c3f09233bd7decf8f7105a1c4e3162fb9142128c74adad0fb361b7eb/pillow-11.2.1-cp313-cp313t-win32.whl", hash = "sha256:e0b55f27f584ed623221cfe995c912c61606be8513bfa0e07d2c674b4516d9dd", size = 2335774 }, + { url = "https://files.pythonhosted.org/packages/a5/ac/77525347cb43b83ae905ffe257bbe2cc6fd23acb9796639a1f56aa59d191/pillow-11.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:36d6b82164c39ce5482f649b437382c0fb2395eabc1e2b1702a6deb8ad647d6e", size = 2681895 }, + { url = "https://files.pythonhosted.org/packages/67/32/32dc030cfa91ca0fc52baebbba2e009bb001122a1daa8b6a79ad830b38d3/pillow-11.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:225c832a13326e34f212d2072982bb1adb210e0cc0b153e688743018c94a2681", size = 2417234 }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "postgrest" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "deprecation" }, + { name = "httpx", extra = ["http2"] }, + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/fb/be6216146156a22069fe87cea086e0308ca3595c10d7df90b70ef6ec339f/postgrest-1.0.1.tar.gz", hash = "sha256:0d6556dadfd8392147d98aad097fe7bf0196602e28a58eee5e9bde4390bb573f", size = 15147 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/0b/526f09779066e5c7716ede56a0394b1282a66b8381974879a77ae590c639/postgrest-1.0.1-py3-none-any.whl", hash = "sha256:fcc0518d68d924198c41c8cbaa70c342c641cb49311be33ba4fc74b4e742f22e", size = 22307 }, +] + +[[package]] +name = "propcache" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/07/c8/fdc6686a986feae3541ea23dcaa661bd93972d3940460646c6bb96e21c40/propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf", size = 43651 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/aa/ca78d9be314d1e15ff517b992bebbed3bdfef5b8919e85bf4940e57b6137/propcache-0.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723", size = 80430 }, + { url = "https://files.pythonhosted.org/packages/1a/d8/f0c17c44d1cda0ad1979af2e593ea290defdde9eaeb89b08abbe02a5e8e1/propcache-0.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976", size = 46637 }, + { url = "https://files.pythonhosted.org/packages/ae/bd/c1e37265910752e6e5e8a4c1605d0129e5b7933c3dc3cf1b9b48ed83b364/propcache-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b", size = 46123 }, + { url = "https://files.pythonhosted.org/packages/d4/b0/911eda0865f90c0c7e9f0415d40a5bf681204da5fd7ca089361a64c16b28/propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f", size = 243031 }, + { url = "https://files.pythonhosted.org/packages/0a/06/0da53397c76a74271621807265b6eb61fb011451b1ddebf43213df763669/propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70", size = 249100 }, + { url = "https://files.pythonhosted.org/packages/f1/eb/13090e05bf6b963fc1653cdc922133ced467cb4b8dab53158db5a37aa21e/propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7", size = 250170 }, + { url = "https://files.pythonhosted.org/packages/3b/4c/f72c9e1022b3b043ec7dc475a0f405d4c3e10b9b1d378a7330fecf0652da/propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25", size = 245000 }, + { url = "https://files.pythonhosted.org/packages/e8/fd/970ca0e22acc829f1adf5de3724085e778c1ad8a75bec010049502cb3a86/propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277", size = 230262 }, + { url = "https://files.pythonhosted.org/packages/c4/42/817289120c6b9194a44f6c3e6b2c3277c5b70bbad39e7df648f177cc3634/propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8", size = 236772 }, + { url = "https://files.pythonhosted.org/packages/7c/9c/3b3942b302badd589ad6b672da3ca7b660a6c2f505cafd058133ddc73918/propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e", size = 231133 }, + { url = "https://files.pythonhosted.org/packages/98/a1/75f6355f9ad039108ff000dfc2e19962c8dea0430da9a1428e7975cf24b2/propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee", size = 230741 }, + { url = "https://files.pythonhosted.org/packages/67/0c/3e82563af77d1f8731132166da69fdfd95e71210e31f18edce08a1eb11ea/propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815", size = 244047 }, + { url = "https://files.pythonhosted.org/packages/f7/50/9fb7cca01532a08c4d5186d7bb2da6c4c587825c0ae134b89b47c7d62628/propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5", size = 246467 }, + { url = "https://files.pythonhosted.org/packages/a9/02/ccbcf3e1c604c16cc525309161d57412c23cf2351523aedbb280eb7c9094/propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7", size = 241022 }, + { url = "https://files.pythonhosted.org/packages/db/19/e777227545e09ca1e77a6e21274ae9ec45de0f589f0ce3eca2a41f366220/propcache-0.3.1-cp312-cp312-win32.whl", hash = "sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b", size = 40647 }, + { url = "https://files.pythonhosted.org/packages/24/bb/3b1b01da5dd04c77a204c84e538ff11f624e31431cfde7201d9110b092b1/propcache-0.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3", size = 44784 }, + { url = "https://files.pythonhosted.org/packages/58/60/f645cc8b570f99be3cf46714170c2de4b4c9d6b827b912811eff1eb8a412/propcache-0.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8", size = 77865 }, + { url = "https://files.pythonhosted.org/packages/6f/d4/c1adbf3901537582e65cf90fd9c26fde1298fde5a2c593f987112c0d0798/propcache-0.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f", size = 45452 }, + { url = "https://files.pythonhosted.org/packages/d1/b5/fe752b2e63f49f727c6c1c224175d21b7d1727ce1d4873ef1c24c9216830/propcache-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111", size = 44800 }, + { url = "https://files.pythonhosted.org/packages/62/37/fc357e345bc1971e21f76597028b059c3d795c5ca7690d7a8d9a03c9708a/propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5", size = 225804 }, + { url = "https://files.pythonhosted.org/packages/0d/f1/16e12c33e3dbe7f8b737809bad05719cff1dccb8df4dafbcff5575002c0e/propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb", size = 230650 }, + { url = "https://files.pythonhosted.org/packages/3e/a2/018b9f2ed876bf5091e60153f727e8f9073d97573f790ff7cdf6bc1d1fb8/propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7", size = 234235 }, + { url = "https://files.pythonhosted.org/packages/45/5f/3faee66fc930dfb5da509e34c6ac7128870631c0e3582987fad161fcb4b1/propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120", size = 228249 }, + { url = "https://files.pythonhosted.org/packages/62/1e/a0d5ebda5da7ff34d2f5259a3e171a94be83c41eb1e7cd21a2105a84a02e/propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654", size = 214964 }, + { url = "https://files.pythonhosted.org/packages/db/a0/d72da3f61ceab126e9be1f3bc7844b4e98c6e61c985097474668e7e52152/propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e", size = 222501 }, + { url = "https://files.pythonhosted.org/packages/18/6d/a008e07ad7b905011253adbbd97e5b5375c33f0b961355ca0a30377504ac/propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b", size = 217917 }, + { url = "https://files.pythonhosted.org/packages/98/37/02c9343ffe59e590e0e56dc5c97d0da2b8b19fa747ebacf158310f97a79a/propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53", size = 217089 }, + { url = "https://files.pythonhosted.org/packages/53/1b/d3406629a2c8a5666d4674c50f757a77be119b113eedd47b0375afdf1b42/propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5", size = 228102 }, + { url = "https://files.pythonhosted.org/packages/cd/a7/3664756cf50ce739e5f3abd48febc0be1a713b1f389a502ca819791a6b69/propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7", size = 230122 }, + { url = "https://files.pythonhosted.org/packages/35/36/0bbabaacdcc26dac4f8139625e930f4311864251276033a52fd52ff2a274/propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef", size = 226818 }, + { url = "https://files.pythonhosted.org/packages/cc/27/4e0ef21084b53bd35d4dae1634b6d0bad35e9c58ed4f032511acca9d4d26/propcache-0.3.1-cp313-cp313-win32.whl", hash = "sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24", size = 40112 }, + { url = "https://files.pythonhosted.org/packages/a6/2c/a54614d61895ba6dd7ac8f107e2b2a0347259ab29cbf2ecc7b94fa38c4dc/propcache-0.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037", size = 44034 }, + { url = "https://files.pythonhosted.org/packages/5a/a8/0a4fd2f664fc6acc66438370905124ce62e84e2e860f2557015ee4a61c7e/propcache-0.3.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f", size = 82613 }, + { url = "https://files.pythonhosted.org/packages/4d/e5/5ef30eb2cd81576256d7b6caaa0ce33cd1d2c2c92c8903cccb1af1a4ff2f/propcache-0.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c", size = 47763 }, + { url = "https://files.pythonhosted.org/packages/87/9a/87091ceb048efeba4d28e903c0b15bcc84b7c0bf27dc0261e62335d9b7b8/propcache-0.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc", size = 47175 }, + { url = "https://files.pythonhosted.org/packages/3e/2f/854e653c96ad1161f96194c6678a41bbb38c7947d17768e8811a77635a08/propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de", size = 292265 }, + { url = "https://files.pythonhosted.org/packages/40/8d/090955e13ed06bc3496ba4a9fb26c62e209ac41973cb0d6222de20c6868f/propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6", size = 294412 }, + { url = "https://files.pythonhosted.org/packages/39/e6/d51601342e53cc7582449e6a3c14a0479fab2f0750c1f4d22302e34219c6/propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7", size = 294290 }, + { url = "https://files.pythonhosted.org/packages/3b/4d/be5f1a90abc1881884aa5878989a1acdafd379a91d9c7e5e12cef37ec0d7/propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458", size = 282926 }, + { url = "https://files.pythonhosted.org/packages/57/2b/8f61b998c7ea93a2b7eca79e53f3e903db1787fca9373af9e2cf8dc22f9d/propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11", size = 267808 }, + { url = "https://files.pythonhosted.org/packages/11/1c/311326c3dfce59c58a6098388ba984b0e5fb0381ef2279ec458ef99bd547/propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c", size = 290916 }, + { url = "https://files.pythonhosted.org/packages/4b/74/91939924b0385e54dc48eb2e4edd1e4903ffd053cf1916ebc5347ac227f7/propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf", size = 262661 }, + { url = "https://files.pythonhosted.org/packages/c2/d7/e6079af45136ad325c5337f5dd9ef97ab5dc349e0ff362fe5c5db95e2454/propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27", size = 264384 }, + { url = "https://files.pythonhosted.org/packages/b7/d5/ba91702207ac61ae6f1c2da81c5d0d6bf6ce89e08a2b4d44e411c0bbe867/propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757", size = 291420 }, + { url = "https://files.pythonhosted.org/packages/58/70/2117780ed7edcd7ba6b8134cb7802aada90b894a9810ec56b7bb6018bee7/propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18", size = 290880 }, + { url = "https://files.pythonhosted.org/packages/4a/1f/ecd9ce27710021ae623631c0146719280a929d895a095f6d85efb6a0be2e/propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a", size = 287407 }, + { url = "https://files.pythonhosted.org/packages/3e/66/2e90547d6b60180fb29e23dc87bd8c116517d4255240ec6d3f7dc23d1926/propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d", size = 42573 }, + { url = "https://files.pythonhosted.org/packages/cb/8f/50ad8599399d1861b4d2b6b45271f0ef6af1b09b0a2386a46dbaf19c9535/propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e", size = 46757 }, + { url = "https://files.pythonhosted.org/packages/b8/d3/c3cb8f1d6ae3b37f83e1de806713a9b3642c5895f0215a62e1a4bd6e5e34/propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40", size = 12376 }, +] + +[[package]] +name = "protobuf" +version = "6.30.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/8c/cf2ac658216eebe49eaedf1e06bc06cbf6a143469236294a1171a51357c3/protobuf-6.30.2.tar.gz", hash = "sha256:35c859ae076d8c56054c25b59e5e59638d86545ed6e2b6efac6be0b6ea3ba048", size = 429315 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/85/cd53abe6a6cbf2e0029243d6ae5fb4335da2996f6c177bb2ce685068e43d/protobuf-6.30.2-cp310-abi3-win32.whl", hash = "sha256:b12ef7df7b9329886e66404bef5e9ce6a26b54069d7f7436a0853ccdeb91c103", size = 419148 }, + { url = "https://files.pythonhosted.org/packages/97/e9/7b9f1b259d509aef2b833c29a1f3c39185e2bf21c9c1be1cd11c22cb2149/protobuf-6.30.2-cp310-abi3-win_amd64.whl", hash = "sha256:7653c99774f73fe6b9301b87da52af0e69783a2e371e8b599b3e9cb4da4b12b9", size = 431003 }, + { url = "https://files.pythonhosted.org/packages/8e/66/7f3b121f59097c93267e7f497f10e52ced7161b38295137a12a266b6c149/protobuf-6.30.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:0eb523c550a66a09a0c20f86dd554afbf4d32b02af34ae53d93268c1f73bc65b", size = 417579 }, + { url = "https://files.pythonhosted.org/packages/d0/89/bbb1bff09600e662ad5b384420ad92de61cab2ed0f12ace1fd081fd4c295/protobuf-6.30.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:50f32cc9fd9cb09c783ebc275611b4f19dfdfb68d1ee55d2f0c7fa040df96815", size = 317319 }, + { url = "https://files.pythonhosted.org/packages/28/50/1925de813499546bc8ab3ae857e3ec84efe7d2f19b34529d0c7c3d02d11d/protobuf-6.30.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4f6c687ae8efae6cf6093389a596548214467778146b7245e886f35e1485315d", size = 316212 }, + { url = "https://files.pythonhosted.org/packages/e5/a1/93c2acf4ade3c5b557d02d500b06798f4ed2c176fa03e3c34973ca92df7f/protobuf-6.30.2-py3-none-any.whl", hash = "sha256:ae86b030e69a98e08c77beab574cbcb9fff6d031d57209f574a5aea1445f4b51", size = 167062 }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + +[[package]] +name = "pydantic" +version = "2.11.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/10/2e/ca897f093ee6c5f3b0bee123ee4465c50e75431c3d5b6a3b44a47134e891/pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3", size = 785513 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/1d/407b29780a289868ed696d1616f4aad49d6388e5a77f567dcd2629dcd7b8/pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f", size = 443591 }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/17/19/ed6a078a5287aea7922de6841ef4c06157931622c89c2a47940837b5eecd/pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df", size = 434395 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/ce/3cb22b07c29938f97ff5f5bb27521f95e2ebec399b882392deb68d6c440e/pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8", size = 2026640 }, + { url = "https://files.pythonhosted.org/packages/19/78/f381d643b12378fee782a72126ec5d793081ef03791c28a0fd542a5bee64/pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498", size = 1852649 }, + { url = "https://files.pythonhosted.org/packages/9d/2b/98a37b80b15aac9eb2c6cfc6dbd35e5058a352891c5cce3a8472d77665a6/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939", size = 1892472 }, + { url = "https://files.pythonhosted.org/packages/4e/d4/3c59514e0f55a161004792b9ff3039da52448f43f5834f905abef9db6e4a/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d", size = 1977509 }, + { url = "https://files.pythonhosted.org/packages/a9/b6/c2c7946ef70576f79a25db59a576bce088bdc5952d1b93c9789b091df716/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e", size = 2128702 }, + { url = "https://files.pythonhosted.org/packages/88/fe/65a880f81e3f2a974312b61f82a03d85528f89a010ce21ad92f109d94deb/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3", size = 2679428 }, + { url = "https://files.pythonhosted.org/packages/6f/ff/4459e4146afd0462fb483bb98aa2436d69c484737feaceba1341615fb0ac/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d", size = 2008753 }, + { url = "https://files.pythonhosted.org/packages/7c/76/1c42e384e8d78452ededac8b583fe2550c84abfef83a0552e0e7478ccbc3/pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b", size = 2114849 }, + { url = "https://files.pythonhosted.org/packages/00/72/7d0cf05095c15f7ffe0eb78914b166d591c0eed72f294da68378da205101/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39", size = 2069541 }, + { url = "https://files.pythonhosted.org/packages/b3/69/94a514066bb7d8be499aa764926937409d2389c09be0b5107a970286ef81/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a", size = 2239225 }, + { url = "https://files.pythonhosted.org/packages/84/b0/e390071eadb44b41f4f54c3cef64d8bf5f9612c92686c9299eaa09e267e2/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db", size = 2248373 }, + { url = "https://files.pythonhosted.org/packages/d6/b2/288b3579ffc07e92af66e2f1a11be3b056fe1214aab314748461f21a31c3/pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda", size = 1907034 }, + { url = "https://files.pythonhosted.org/packages/02/28/58442ad1c22b5b6742b992ba9518420235adced665513868f99a1c2638a5/pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4", size = 1956848 }, + { url = "https://files.pythonhosted.org/packages/a1/eb/f54809b51c7e2a1d9f439f158b8dd94359321abcc98767e16fc48ae5a77e/pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea", size = 1903986 }, + { url = "https://files.pythonhosted.org/packages/7a/24/eed3466a4308d79155f1cdd5c7432c80ddcc4530ba8623b79d5ced021641/pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a", size = 2033551 }, + { url = "https://files.pythonhosted.org/packages/ab/14/df54b1a0bc9b6ded9b758b73139d2c11b4e8eb43e8ab9c5847c0a2913ada/pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266", size = 1852785 }, + { url = "https://files.pythonhosted.org/packages/fa/96/e275f15ff3d34bb04b0125d9bc8848bf69f25d784d92a63676112451bfb9/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3", size = 1897758 }, + { url = "https://files.pythonhosted.org/packages/b7/d8/96bc536e975b69e3a924b507d2a19aedbf50b24e08c80fb00e35f9baaed8/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a", size = 1986109 }, + { url = "https://files.pythonhosted.org/packages/90/72/ab58e43ce7e900b88cb571ed057b2fcd0e95b708a2e0bed475b10130393e/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516", size = 2129159 }, + { url = "https://files.pythonhosted.org/packages/dc/3f/52d85781406886c6870ac995ec0ba7ccc028b530b0798c9080531b409fdb/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764", size = 2680222 }, + { url = "https://files.pythonhosted.org/packages/f4/56/6e2ef42f363a0eec0fd92f74a91e0ac48cd2e49b695aac1509ad81eee86a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d", size = 2006980 }, + { url = "https://files.pythonhosted.org/packages/4c/c0/604536c4379cc78359f9ee0aa319f4aedf6b652ec2854953f5a14fc38c5a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4", size = 2120840 }, + { url = "https://files.pythonhosted.org/packages/1f/46/9eb764814f508f0edfb291a0f75d10854d78113fa13900ce13729aaec3ae/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde", size = 2072518 }, + { url = "https://files.pythonhosted.org/packages/42/e3/fb6b2a732b82d1666fa6bf53e3627867ea3131c5f39f98ce92141e3e3dc1/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e", size = 2248025 }, + { url = "https://files.pythonhosted.org/packages/5c/9d/fbe8fe9d1aa4dac88723f10a921bc7418bd3378a567cb5e21193a3c48b43/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd", size = 2254991 }, + { url = "https://files.pythonhosted.org/packages/aa/99/07e2237b8a66438d9b26482332cda99a9acccb58d284af7bc7c946a42fd3/pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f", size = 1915262 }, + { url = "https://files.pythonhosted.org/packages/8a/f4/e457a7849beeed1e5defbcf5051c6f7b3c91a0624dd31543a64fc9adcf52/pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40", size = 1956626 }, + { url = "https://files.pythonhosted.org/packages/20/d0/e8d567a7cff7b04e017ae164d98011f1e1894269fe8e90ea187a3cbfb562/pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523", size = 1909590 }, + { url = "https://files.pythonhosted.org/packages/ef/fd/24ea4302d7a527d672c5be06e17df16aabfb4e9fdc6e0b345c21580f3d2a/pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d", size = 1812963 }, + { url = "https://files.pythonhosted.org/packages/5f/95/4fbc2ecdeb5c1c53f1175a32d870250194eb2fdf6291b795ab08c8646d5d/pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c", size = 1986896 }, + { url = "https://files.pythonhosted.org/packages/71/ae/fe31e7f4a62431222d8f65a3bd02e3fa7e6026d154a00818e6d30520ea77/pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18", size = 1931810 }, +] + +[[package]] +name = "pydantic-settings" +version = "2.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/67/1d/42628a2c33e93f8e9acbde0d5d735fa0850f3e6a2f8cb1eb6c40b9a732ac/pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268", size = 163234 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/5f/d6d641b490fd3ec2c4c13b4244d68deea3a1b970a97be64f34fb5504ff72/pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef", size = 44356 }, +] + +[[package]] +name = "pydub" +version = "0.25.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/9a/e6bca0eed82db26562c73b5076539a4a08d3cffd19c3cc5913a3e61145fd/pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f", size = 38326 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/53/d78dc063216e62fc55f6b2eebb447f6a4b0a59f55c8406376f76bf959b08/pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6", size = 32327 }, +] + +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997 }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + +[[package]] +name = "pyreadline3" +version = "3.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178 }, +] + +[[package]] +name = "pytest" +version = "8.3.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634 }, +] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/90/a955c3ab35ccd41ad4de556596fa86685bf4fc5ffcc62d22d856cfd4e29a/pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0", size = 32814 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/3b/b26f90f74e2986a82df6e7ac7e319b8ea7ccece1caec9f8ab6104dc70603/pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f", size = 9863 }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256 }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 }, +] + +[[package]] +name = "python-pptx" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "lxml" }, + { name = "pillow" }, + { name = "typing-extensions" }, + { name = "xlsxwriter" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/a9/0c0db8d37b2b8a645666f7fd8accea4c6224e013c42b1d5c17c93590cd06/python_pptx-1.0.2.tar.gz", hash = "sha256:479a8af0eaf0f0d76b6f00b0887732874ad2e3188230315290cd1f9dd9cc7095", size = 10109297 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/4f/00be2196329ebbff56ce564aa94efb0fbc828d00de250b1980de1a34ab49/python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba", size = 472788 }, +] + +[[package]] +name = "pytz" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 }, +] + +[[package]] +name = "pywa" +version = "2.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/69/7a13b5f17dd2ab4f0ade72429cc574fcbb7b0fb1b059f46afdaaeb1284ae/pywa-2.9.0.tar.gz", hash = "sha256:8a4961c4e96e2424c081df2e70f6ac3e299c2b31c8f9c1ccd0f37b0318544f83", size = 196898 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/d0/972a2f7e6aa97d9793ff0a19ca836e813ac88a8d63c1c7dce603f05afa3d/pywa-2.9.0-py3-none-any.whl", hash = "sha256:a8d4619e4e8aa8d06fb7766a83ec1ec4f417aa19637f9537b6674878c09832a0", size = 190839 }, +] + +[[package]] +name = "realtime" +version = "2.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "python-dateutil" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1e/1e/c5f47928789cd5abb96e527929dea088213968f785983a231b3dfe08cc4f/realtime-2.4.2.tar.gz", hash = "sha256:760308d5310533f65a9098e0b482a518f6ad2f3c0f2723e83cf5856865bafc5d", size = 18802 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/b7/1b7651f353e14543c60cdfe40e3ea4dea412cfb2e93ab6384e72be813f05/realtime-2.4.2-py3-none-any.whl", hash = "sha256:0cc1b4a097acf9c0bd3a2f1998170de47744574c606617285113ddb3021e54ca", size = 22025 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, +] + +[[package]] +name = "slack-bolt" +version = "1.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "slack-sdk" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a5/c9/7421458765061cb98e01a12d6cadc273ee35e75856b0073ec7c3efa5cf46/slack_bolt-1.23.0.tar.gz", hash = "sha256:3d2c3eb13131407a94f925eb22b180d352c2d97b808303ef92b7a46d6508c843", size = 130659 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/48/6f435d702a680307488c7d8d3bd6e12552bbf2ff3e0eb67ccb7d0850d122/slack_bolt-1.23.0-py2.py3-none-any.whl", hash = "sha256:6d6ae39d80c964c362505ae4e587eed2b26dbc3a9f0cb76af1150c30fb670488", size = 229691 }, +] + +[[package]] +name = "slack-sdk" +version = "3.35.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/32/a5/13077a5696ded22cc955ff6314028b7e6140b1c989b19ca27a6b26590e6e/slack_sdk-3.35.0.tar.gz", hash = "sha256:8183b6cbf26a0c1e2441478cd9c0dc4eef08d60c1394cfdc9a769e309a9b6459", size = 232887 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/8e/eed71dc79a187ba32681f12a104786ab89355bc474082211d92e1fba6bcf/slack_sdk-3.35.0-py2.py3-none-any.whl", hash = "sha256:00933d171fbd8a068b321ebb5f89612cc781d3183d8e3447c85499eca9d865be", size = 293272 }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575 }, +] + +[[package]] +name = "soupsieve" +version = "2.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/ce/fbaeed4f9fb8b2daa961f90591662df6a86c1abf25c548329a86920aedfb/soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb", size = 101569 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/c2/fe97d779f3ef3b15f05c94a2f1e3d21732574ed441687474db9d342a7315/soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9", size = 36186 }, +] + +[[package]] +name = "speechrecognition" +version = "3.14.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "audioop-lts", marker = "python_full_version >= '3.13'" }, + { name = "standard-aifc", marker = "python_full_version >= '3.13'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/81/2f98238647f409d8faa209a0cbe9a1e2be74eeca5f739971698a2b54b12d/speechrecognition-3.14.2.tar.gz", hash = "sha256:2daa467f0b5686017ff3f9a64dcfa1a789ee10d1b0ada3be74bfad10eaef5f49", size = 32857832 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/7a/ef9a0a3ddd7e8b304906bf0d7b1f3cd92759d7ea8be10d284183b2e8f47c/speechrecognition-3.14.2-py3-none-any.whl", hash = "sha256:42940b95295b358fdd7415daa01260c8b20025d6b4000fbbaa3458f005d912b7", size = 32853272 }, +] + +[[package]] +name = "sse-starlette" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "starlette" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = "sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120 }, +] + +[[package]] +name = "standard-aifc" +version = "3.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "audioop-lts", marker = "python_full_version >= '3.13'" }, + { name = "standard-chunk", marker = "python_full_version >= '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/53/6050dc3dde1671eb3db592c13b55a8005e5040131f7509cef0215212cb84/standard_aifc-3.13.0.tar.gz", hash = "sha256:64e249c7cb4b3daf2fdba4e95721f811bde8bdfc43ad9f936589b7bb2fae2e43", size = 15240 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/52/5fbb203394cc852334d1575cc020f6bcec768d2265355984dfd361968f36/standard_aifc-3.13.0-py3-none-any.whl", hash = "sha256:f7ae09cc57de1224a0dd8e3eb8f73830be7c3d0bc485de4c1f82b4a7f645ac66", size = 10492 }, +] + +[[package]] +name = "standard-chunk" +version = "3.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/06/ce1bb165c1f111c7d23a1ad17204d67224baa69725bb6857a264db61beaf/standard_chunk-3.13.0.tar.gz", hash = "sha256:4ac345d37d7e686d2755e01836b8d98eda0d1a3ee90375e597ae43aaf064d654", size = 4672 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/90/a5c1084d87767d787a6caba615aa50dc587229646308d9420c960cb5e4c0/standard_chunk-3.13.0-py3-none-any.whl", hash = "sha256:17880a26c285189c644bd5bd8f8ed2bdb795d216e3293e6dbe55bbd848e2982c", size = 4944 }, +] + +[[package]] +name = "starlette" +version = "0.46.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037 }, +] + +[[package]] +name = "storage3" +version = "0.11.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", extra = ["http2"] }, + { name = "python-dateutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ac/25/83eb4e4612dc07a3bb3cab96253c9c83752d4816f2cf38aa832dfb8d8813/storage3-0.11.3.tar.gz", hash = "sha256:883637132aad36d9d92b7c497a8a56dff7c51f15faf2ff7acbccefbbd5e97347", size = 9930 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/8d/ff89f85c4b48285ac7cddf0fafe5e55bb3742d374672b2fbd2627c213fa6/storage3-0.11.3-py3-none-any.whl", hash = "sha256:090c42152217d5d39bd94af3ddeb60c8982f3a283dcd90b53d058f2db33e6007", size = 17831 }, +] + +[[package]] +name = "strenum" +version = "0.4.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/ad/430fb60d90e1d112a62ff57bdd1f286ec73a2a0331272febfddd21f330e1/StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff", size = 23384 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/69/297302c5f5f59c862faa31e6cb9a4cd74721cd1e052b38e464c5b402df8b/StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659", size = 8851 }, +] + +[[package]] +name = "supabase" +version = "2.15.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "gotrue" }, + { name = "httpx" }, + { name = "postgrest" }, + { name = "realtime" }, + { name = "storage3" }, + { name = "supafunc" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/3d6f6826daacf526a559c25e7c8eaa824dcefc640e1123f5d9d91a50fa60/supabase-2.15.0.tar.gz", hash = "sha256:2e66289ad74ae9c4cb04a69f9de00cd2ce880cd890de23269a40ac5b69151d26", size = 14444 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/08/0f27c01111c1e9233244cd60b3f01484772447768eea69fe07cbafde4424/supabase-2.15.0-py3-none-any.whl", hash = "sha256:a665c7ab6c8ad1d80609ab62ad657f66fdaf38070ec9e0db5c7887fd72b109c0", size = 17229 }, +] + +[[package]] +name = "supafunc" +version = "0.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", extra = ["http2"] }, + { name = "strenum" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/74/4f9e23690d2dfc0afb4a13d2d232415a6ef9b80397495afb548410035532/supafunc-0.9.4.tar.gz", hash = "sha256:68824a9a7bcccf5ab1e038cda632ba47cba27f2a7dc606014206b56f5a071de2", size = 4806 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/51/b0bb6d405c053ecf9c51267b5a429424cab9ae3de229a1dfda3197ab251f/supafunc-0.9.4-py3-none-any.whl", hash = "sha256:2b34a794fb7930953150a434cdb93c24a04cf526b2f51a9e60b2be0b86d44fb2", size = 7792 }, +] + +[[package]] +name = "sympy" +version = "1.13.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mpmath" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/11/8a/5a7fd6284fa8caac23a26c9ddf9c30485a48169344b4bd3b0f02fef1890f/sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9", size = 7533196 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/ff/c87e0622b1dadea79d2fb0b25ade9ed98954c9033722eb707053d310d4f3/sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73", size = 6189483 }, +] + +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, +] + +[[package]] +name = "trio" +version = "0.29.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "cffi", marker = "implementation_name != 'pypy' and os_name == 'nt'" }, + { name = "idna" }, + { name = "outcome" }, + { name = "sniffio" }, + { name = "sortedcontainers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/47/f62e62a1a6f37909aed0bf8f5d5411e06fa03846cfcb64540cd1180ccc9f/trio-0.29.0.tar.gz", hash = "sha256:ea0d3967159fc130acb6939a0be0e558e364fee26b5deeecc893a6b08c361bdf", size = 588952 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/55/c4d9bea8b3d7937901958f65124123512419ab0eb73695e5f382521abbfb/trio-0.29.0-py3-none-any.whl", hash = "sha256:d8c463f1a9cc776ff63e331aba44c125f423a5a13c684307e828d930e625ba66", size = 492920 }, +] + +[[package]] +name = "typing-extensions" +version = "4.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806 }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125 }, +] + +[[package]] +name = "tzdata" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 }, +] + +[[package]] +name = "urllib3" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680 }, +] + +[[package]] +name = "uvicorn" +version = "0.34.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/ae/9bbb19b9e1c450cf9ecaef06463e40234d98d95bf572fab11b4f19ae5ded/uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328", size = 76815 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/4b/4cef6ce21a2aaca9d852a6e84ef4f135d99fcd74fa75105e2fc0c8308acd/uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403", size = 62483 }, +] + +[[package]] +name = "uvloop" +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284 }, + { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349 }, + { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089 }, + { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770 }, + { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321 }, + { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022 }, + { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123 }, + { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325 }, + { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806 }, + { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068 }, + { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428 }, + { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018 }, +] + +[[package]] +name = "websockets" +version = "14.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/54/8359678c726243d19fae38ca14a334e740782336c9f19700858c4eb64a1e/websockets-14.2.tar.gz", hash = "sha256:5059ed9c54945efb321f097084b4c7e52c246f2c869815876a69d1efc4ad6eb5", size = 164394 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/81/04f7a397653dc8bec94ddc071f34833e8b99b13ef1a3804c149d59f92c18/websockets-14.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f20522e624d7ffbdbe259c6b6a65d73c895045f76a93719aa10cd93b3de100c", size = 163096 }, + { url = "https://files.pythonhosted.org/packages/ec/c5/de30e88557e4d70988ed4d2eabd73fd3e1e52456b9f3a4e9564d86353b6d/websockets-14.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:647b573f7d3ada919fd60e64d533409a79dcf1ea21daeb4542d1d996519ca967", size = 160758 }, + { url = "https://files.pythonhosted.org/packages/e5/8c/d130d668781f2c77d106c007b6c6c1d9db68239107c41ba109f09e6c218a/websockets-14.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6af99a38e49f66be5a64b1e890208ad026cda49355661549c507152113049990", size = 160995 }, + { url = "https://files.pythonhosted.org/packages/a6/bc/f6678a0ff17246df4f06765e22fc9d98d1b11a258cc50c5968b33d6742a1/websockets-14.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:091ab63dfc8cea748cc22c1db2814eadb77ccbf82829bac6b2fbe3401d548eda", size = 170815 }, + { url = "https://files.pythonhosted.org/packages/d8/b2/8070cb970c2e4122a6ef38bc5b203415fd46460e025652e1ee3f2f43a9a3/websockets-14.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b374e8953ad477d17e4851cdc66d83fdc2db88d9e73abf755c94510ebddceb95", size = 169759 }, + { url = "https://files.pythonhosted.org/packages/81/da/72f7caabd94652e6eb7e92ed2d3da818626e70b4f2b15a854ef60bf501ec/websockets-14.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a39d7eceeea35db85b85e1169011bb4321c32e673920ae9c1b6e0978590012a3", size = 170178 }, + { url = "https://files.pythonhosted.org/packages/31/e0/812725b6deca8afd3a08a2e81b3c4c120c17f68c9b84522a520b816cda58/websockets-14.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0a6f3efd47ffd0d12080594f434faf1cd2549b31e54870b8470b28cc1d3817d9", size = 170453 }, + { url = "https://files.pythonhosted.org/packages/66/d3/8275dbc231e5ba9bb0c4f93144394b4194402a7a0c8ffaca5307a58ab5e3/websockets-14.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:065ce275e7c4ffb42cb738dd6b20726ac26ac9ad0a2a48e33ca632351a737267", size = 169830 }, + { url = "https://files.pythonhosted.org/packages/a3/ae/e7d1a56755ae15ad5a94e80dd490ad09e345365199600b2629b18ee37bc7/websockets-14.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e9d0e53530ba7b8b5e389c02282f9d2aa47581514bd6049d3a7cffe1385cf5fe", size = 169824 }, + { url = "https://files.pythonhosted.org/packages/b6/32/88ccdd63cb261e77b882e706108d072e4f1c839ed723bf91a3e1f216bf60/websockets-14.2-cp312-cp312-win32.whl", hash = "sha256:20e6dd0984d7ca3037afcb4494e48c74ffb51e8013cac71cf607fffe11df7205", size = 163981 }, + { url = "https://files.pythonhosted.org/packages/b3/7d/32cdb77990b3bdc34a306e0a0f73a1275221e9a66d869f6ff833c95b56ef/websockets-14.2-cp312-cp312-win_amd64.whl", hash = "sha256:44bba1a956c2c9d268bdcdf234d5e5ff4c9b6dc3e300545cbe99af59dda9dcce", size = 164421 }, + { url = "https://files.pythonhosted.org/packages/82/94/4f9b55099a4603ac53c2912e1f043d6c49d23e94dd82a9ce1eb554a90215/websockets-14.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6f1372e511c7409a542291bce92d6c83320e02c9cf392223272287ce55bc224e", size = 163102 }, + { url = "https://files.pythonhosted.org/packages/8e/b7/7484905215627909d9a79ae07070057afe477433fdacb59bf608ce86365a/websockets-14.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4da98b72009836179bb596a92297b1a61bb5a830c0e483a7d0766d45070a08ad", size = 160766 }, + { url = "https://files.pythonhosted.org/packages/a3/a4/edb62efc84adb61883c7d2c6ad65181cb087c64252138e12d655989eec05/websockets-14.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8a86a269759026d2bde227652b87be79f8a734e582debf64c9d302faa1e9f03", size = 160998 }, + { url = "https://files.pythonhosted.org/packages/f5/79/036d320dc894b96af14eac2529967a6fc8b74f03b83c487e7a0e9043d842/websockets-14.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86cf1aaeca909bf6815ea714d5c5736c8d6dd3a13770e885aafe062ecbd04f1f", size = 170780 }, + { url = "https://files.pythonhosted.org/packages/63/75/5737d21ee4dd7e4b9d487ee044af24a935e36a9ff1e1419d684feedcba71/websockets-14.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9b0f6c3ba3b1240f602ebb3971d45b02cc12bd1845466dd783496b3b05783a5", size = 169717 }, + { url = "https://files.pythonhosted.org/packages/2c/3c/bf9b2c396ed86a0b4a92ff4cdaee09753d3ee389be738e92b9bbd0330b64/websockets-14.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669c3e101c246aa85bc8534e495952e2ca208bd87994650b90a23d745902db9a", size = 170155 }, + { url = "https://files.pythonhosted.org/packages/75/2d/83a5aca7247a655b1da5eb0ee73413abd5c3a57fc8b92915805e6033359d/websockets-14.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eabdb28b972f3729348e632ab08f2a7b616c7e53d5414c12108c29972e655b20", size = 170495 }, + { url = "https://files.pythonhosted.org/packages/79/dd/699238a92761e2f943885e091486378813ac8f43e3c84990bc394c2be93e/websockets-14.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2066dc4cbcc19f32c12a5a0e8cc1b7ac734e5b64ac0a325ff8353451c4b15ef2", size = 169880 }, + { url = "https://files.pythonhosted.org/packages/c8/c9/67a8f08923cf55ce61aadda72089e3ed4353a95a3a4bc8bf42082810e580/websockets-14.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ab95d357cd471df61873dadf66dd05dd4709cae001dd6342edafc8dc6382f307", size = 169856 }, + { url = "https://files.pythonhosted.org/packages/17/b1/1ffdb2680c64e9c3921d99db460546194c40d4acbef999a18c37aa4d58a3/websockets-14.2-cp313-cp313-win32.whl", hash = "sha256:a9e72fb63e5f3feacdcf5b4ff53199ec8c18d66e325c34ee4c551ca748623bbc", size = 163974 }, + { url = "https://files.pythonhosted.org/packages/14/13/8b7fc4cb551b9cfd9890f0fd66e53c18a06240319915533b033a56a3d520/websockets-14.2-cp313-cp313-win_amd64.whl", hash = "sha256:b439ea828c4ba99bb3176dc8d9b933392a2413c0f6b149fdcba48393f573377f", size = 164420 }, + { url = "https://files.pythonhosted.org/packages/7b/c8/d529f8a32ce40d98309f4470780631e971a5a842b60aec864833b3615786/websockets-14.2-py3-none-any.whl", hash = "sha256:7a6ceec4ea84469f15cf15807a747e9efe57e369c384fa86e022b3bea679b79b", size = 157416 }, +] + +[[package]] +name = "xlrd" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/b3/19a2540d21dea5f908304375bd43f5ed7a4c28a370dc9122c565423e6b44/xlrd-2.0.1.tar.gz", hash = "sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88", size = 100259 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/0c/c2a72d51fe56e08a08acc85d13013558a2d793028ae7385448a6ccdfae64/xlrd-2.0.1-py2.py3-none-any.whl", hash = "sha256:6a33ee89877bd9abc1158129f6e94be74e2679636b8a205b43b85206c3f0bbdd", size = 96531 }, +] + +[[package]] +name = "xlsxwriter" +version = "3.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/d1/e026d33dd5d552e5bf3a873dee54dad66b550230df8290d79394f09b2315/xlsxwriter-3.2.3.tar.gz", hash = "sha256:ad6fd41bdcf1b885876b1f6b7087560aecc9ae5a9cc2ba97dcac7ab2e210d3d5", size = 209135 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/b1/a252d499f2760b314fcf264d2b36fcc4343a1ecdb25492b210cb0db70a68/XlsxWriter-3.2.3-py3-none-any.whl", hash = "sha256:593f8296e8a91790c6d0378ab08b064f34a642b3feb787cf6738236bd0a4860d", size = 169433 }, +] + +[[package]] +name = "yarl" +version = "1.20.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/51/c0edba5219027f6eab262e139f73e2417b0f4efffa23bf562f6e18f76ca5/yarl-1.20.0.tar.gz", hash = "sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307", size = 185258 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/e8/3efdcb83073df978bb5b1a9cc0360ce596680e6c3fac01f2a994ccbb8939/yarl-1.20.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e06b9f6cdd772f9b665e5ba8161968e11e403774114420737f7884b5bd7bdf6f", size = 147089 }, + { url = "https://files.pythonhosted.org/packages/60/c3/9e776e98ea350f76f94dd80b408eaa54e5092643dbf65fd9babcffb60509/yarl-1.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b9ae2fbe54d859b3ade40290f60fe40e7f969d83d482e84d2c31b9bff03e359e", size = 97706 }, + { url = "https://files.pythonhosted.org/packages/0c/5b/45cdfb64a3b855ce074ae607b9fc40bc82e7613b94e7612b030255c93a09/yarl-1.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d12b8945250d80c67688602c891237994d203d42427cb14e36d1a732eda480e", size = 95719 }, + { url = "https://files.pythonhosted.org/packages/2d/4e/929633b249611eeed04e2f861a14ed001acca3ef9ec2a984a757b1515889/yarl-1.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33", size = 343972 }, + { url = "https://files.pythonhosted.org/packages/49/fd/047535d326c913f1a90407a3baf7ff535b10098611eaef2c527e32e81ca1/yarl-1.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58", size = 339639 }, + { url = "https://files.pythonhosted.org/packages/48/2f/11566f1176a78f4bafb0937c0072410b1b0d3640b297944a6a7a556e1d0b/yarl-1.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f", size = 353745 }, + { url = "https://files.pythonhosted.org/packages/26/17/07dfcf034d6ae8837b33988be66045dd52f878dfb1c4e8f80a7343f677be/yarl-1.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae", size = 354178 }, + { url = "https://files.pythonhosted.org/packages/15/45/212604d3142d84b4065d5f8cab6582ed3d78e4cc250568ef2a36fe1cf0a5/yarl-1.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018", size = 349219 }, + { url = "https://files.pythonhosted.org/packages/e6/e0/a10b30f294111c5f1c682461e9459935c17d467a760c21e1f7db400ff499/yarl-1.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672", size = 337266 }, + { url = "https://files.pythonhosted.org/packages/33/a6/6efa1d85a675d25a46a167f9f3e80104cde317dfdf7f53f112ae6b16a60a/yarl-1.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8", size = 360873 }, + { url = "https://files.pythonhosted.org/packages/77/67/c8ab718cb98dfa2ae9ba0f97bf3cbb7d45d37f13fe1fbad25ac92940954e/yarl-1.20.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7", size = 360524 }, + { url = "https://files.pythonhosted.org/packages/bd/e8/c3f18660cea1bc73d9f8a2b3ef423def8dadbbae6c4afabdb920b73e0ead/yarl-1.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594", size = 365370 }, + { url = "https://files.pythonhosted.org/packages/c9/99/33f3b97b065e62ff2d52817155a89cfa030a1a9b43fee7843ef560ad9603/yarl-1.20.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6", size = 373297 }, + { url = "https://files.pythonhosted.org/packages/3d/89/7519e79e264a5f08653d2446b26d4724b01198a93a74d2e259291d538ab1/yarl-1.20.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1", size = 378771 }, + { url = "https://files.pythonhosted.org/packages/3a/58/6c460bbb884abd2917c3eef6f663a4a873f8dc6f498561fc0ad92231c113/yarl-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b", size = 375000 }, + { url = "https://files.pythonhosted.org/packages/3b/2a/dd7ed1aa23fea996834278d7ff178f215b24324ee527df53d45e34d21d28/yarl-1.20.0-cp312-cp312-win32.whl", hash = "sha256:839de4c574169b6598d47ad61534e6981979ca2c820ccb77bf70f4311dd2cc64", size = 86355 }, + { url = "https://files.pythonhosted.org/packages/ca/c6/333fe0338305c0ac1c16d5aa7cc4841208d3252bbe62172e0051006b5445/yarl-1.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:3d7dbbe44b443b0c4aa0971cb07dcb2c2060e4a9bf8d1301140a33a93c98e18c", size = 92904 }, + { url = "https://files.pythonhosted.org/packages/0f/6f/514c9bff2900c22a4f10e06297714dbaf98707143b37ff0bcba65a956221/yarl-1.20.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2137810a20b933b1b1b7e5cf06a64c3ed3b4747b0e5d79c9447c00db0e2f752f", size = 145030 }, + { url = "https://files.pythonhosted.org/packages/4e/9d/f88da3fa319b8c9c813389bfb3463e8d777c62654c7168e580a13fadff05/yarl-1.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:447c5eadd750db8389804030d15f43d30435ed47af1313303ed82a62388176d3", size = 96894 }, + { url = "https://files.pythonhosted.org/packages/cd/57/92e83538580a6968b2451d6c89c5579938a7309d4785748e8ad42ddafdce/yarl-1.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42fbe577272c203528d402eec8bf4b2d14fd49ecfec92272334270b850e9cd7d", size = 94457 }, + { url = "https://files.pythonhosted.org/packages/e9/ee/7ee43bd4cf82dddd5da97fcaddb6fa541ab81f3ed564c42f146c83ae17ce/yarl-1.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0", size = 343070 }, + { url = "https://files.pythonhosted.org/packages/4a/12/b5eccd1109e2097bcc494ba7dc5de156e41cf8309fab437ebb7c2b296ce3/yarl-1.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501", size = 337739 }, + { url = "https://files.pythonhosted.org/packages/7d/6b/0eade8e49af9fc2585552f63c76fa59ef469c724cc05b29519b19aa3a6d5/yarl-1.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc", size = 351338 }, + { url = "https://files.pythonhosted.org/packages/45/cb/aaaa75d30087b5183c7b8a07b4fb16ae0682dd149a1719b3a28f54061754/yarl-1.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d", size = 353636 }, + { url = "https://files.pythonhosted.org/packages/98/9d/d9cb39ec68a91ba6e66fa86d97003f58570327d6713833edf7ad6ce9dde5/yarl-1.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0", size = 348061 }, + { url = "https://files.pythonhosted.org/packages/72/6b/103940aae893d0cc770b4c36ce80e2ed86fcb863d48ea80a752b8bda9303/yarl-1.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a", size = 334150 }, + { url = "https://files.pythonhosted.org/packages/ef/b2/986bd82aa222c3e6b211a69c9081ba46484cffa9fab2a5235e8d18ca7a27/yarl-1.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2", size = 362207 }, + { url = "https://files.pythonhosted.org/packages/14/7c/63f5922437b873795d9422cbe7eb2509d4b540c37ae5548a4bb68fd2c546/yarl-1.20.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9", size = 361277 }, + { url = "https://files.pythonhosted.org/packages/81/83/450938cccf732466953406570bdb42c62b5ffb0ac7ac75a1f267773ab5c8/yarl-1.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5", size = 364990 }, + { url = "https://files.pythonhosted.org/packages/b4/de/af47d3a47e4a833693b9ec8e87debb20f09d9fdc9139b207b09a3e6cbd5a/yarl-1.20.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877", size = 374684 }, + { url = "https://files.pythonhosted.org/packages/62/0b/078bcc2d539f1faffdc7d32cb29a2d7caa65f1a6f7e40795d8485db21851/yarl-1.20.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e", size = 382599 }, + { url = "https://files.pythonhosted.org/packages/74/a9/4fdb1a7899f1fb47fd1371e7ba9e94bff73439ce87099d5dd26d285fffe0/yarl-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384", size = 378573 }, + { url = "https://files.pythonhosted.org/packages/fd/be/29f5156b7a319e4d2e5b51ce622b4dfb3aa8d8204cd2a8a339340fbfad40/yarl-1.20.0-cp313-cp313-win32.whl", hash = "sha256:70e0c580a0292c7414a1cead1e076c9786f685c1fc4757573d2967689b370e62", size = 86051 }, + { url = "https://files.pythonhosted.org/packages/52/56/05fa52c32c301da77ec0b5f63d2d9605946fe29defacb2a7ebd473c23b81/yarl-1.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:4c43030e4b0af775a85be1fa0433119b1565673266a70bf87ef68a9d5ba3174c", size = 92742 }, + { url = "https://files.pythonhosted.org/packages/d4/2f/422546794196519152fc2e2f475f0e1d4d094a11995c81a465faf5673ffd/yarl-1.20.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b6c4c3d0d6a0ae9b281e492b1465c72de433b782e6b5001c8e7249e085b69051", size = 163575 }, + { url = "https://files.pythonhosted.org/packages/90/fc/67c64ddab6c0b4a169d03c637fb2d2a212b536e1989dec8e7e2c92211b7f/yarl-1.20.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8681700f4e4df891eafa4f69a439a6e7d480d64e52bf460918f58e443bd3da7d", size = 106121 }, + { url = "https://files.pythonhosted.org/packages/6d/00/29366b9eba7b6f6baed7d749f12add209b987c4cfbfa418404dbadc0f97c/yarl-1.20.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:84aeb556cb06c00652dbf87c17838eb6d92cfd317799a8092cee0e570ee11229", size = 103815 }, + { url = "https://files.pythonhosted.org/packages/28/f4/a2a4c967c8323c03689383dff73396281ced3b35d0ed140580825c826af7/yarl-1.20.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1", size = 408231 }, + { url = "https://files.pythonhosted.org/packages/0f/a1/66f7ffc0915877d726b70cc7a896ac30b6ac5d1d2760613603b022173635/yarl-1.20.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb", size = 390221 }, + { url = "https://files.pythonhosted.org/packages/41/15/cc248f0504610283271615e85bf38bc014224122498c2016d13a3a1b8426/yarl-1.20.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00", size = 411400 }, + { url = "https://files.pythonhosted.org/packages/5c/af/f0823d7e092bfb97d24fce6c7269d67fcd1aefade97d0a8189c4452e4d5e/yarl-1.20.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de", size = 411714 }, + { url = "https://files.pythonhosted.org/packages/83/70/be418329eae64b9f1b20ecdaac75d53aef098797d4c2299d82ae6f8e4663/yarl-1.20.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5", size = 404279 }, + { url = "https://files.pythonhosted.org/packages/19/f5/52e02f0075f65b4914eb890eea1ba97e6fd91dd821cc33a623aa707b2f67/yarl-1.20.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a", size = 384044 }, + { url = "https://files.pythonhosted.org/packages/6a/36/b0fa25226b03d3f769c68d46170b3e92b00ab3853d73127273ba22474697/yarl-1.20.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9", size = 416236 }, + { url = "https://files.pythonhosted.org/packages/cb/3a/54c828dd35f6831dfdd5a79e6c6b4302ae2c5feca24232a83cb75132b205/yarl-1.20.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145", size = 402034 }, + { url = "https://files.pythonhosted.org/packages/10/97/c7bf5fba488f7e049f9ad69c1b8fdfe3daa2e8916b3d321aa049e361a55a/yarl-1.20.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda", size = 407943 }, + { url = "https://files.pythonhosted.org/packages/fd/a4/022d2555c1e8fcff08ad7f0f43e4df3aba34f135bff04dd35d5526ce54ab/yarl-1.20.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f", size = 423058 }, + { url = "https://files.pythonhosted.org/packages/4c/f6/0873a05563e5df29ccf35345a6ae0ac9e66588b41fdb7043a65848f03139/yarl-1.20.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd", size = 423792 }, + { url = "https://files.pythonhosted.org/packages/9e/35/43fbbd082708fa42e923f314c24f8277a28483d219e049552e5007a9aaca/yarl-1.20.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f", size = 422242 }, + { url = "https://files.pythonhosted.org/packages/ed/f7/f0f2500cf0c469beb2050b522c7815c575811627e6d3eb9ec7550ddd0bfe/yarl-1.20.0-cp313-cp313t-win32.whl", hash = "sha256:65a4053580fe88a63e8e4056b427224cd01edfb5f951498bfefca4052f0ce0ac", size = 93816 }, + { url = "https://files.pythonhosted.org/packages/3f/93/f73b61353b2a699d489e782c3f5998b59f974ec3156a2050a52dfd7e8946/yarl-1.20.0-cp313-cp313t-win_amd64.whl", hash = "sha256:53b2da3a6ca0a541c1ae799c349788d480e5144cac47dba0266c7cb6c76151fe", size = 101093 }, + { url = "https://files.pythonhosted.org/packages/ea/1f/70c57b3d7278e94ed22d85e09685d3f0a38ebdd8c5c73b65ba4c0d0fe002/yarl-1.20.0-py3-none-any.whl", hash = "sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124", size = 46124 }, +] + +[[package]] +name = "youtube-transcript-api" +version = "1.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "defusedxml" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b0/32/f60d87a99c05a53604c58f20f670c7ea6262b55e0bbeb836ffe4550b248b/youtube_transcript_api-1.0.3.tar.gz", hash = "sha256:902baf90e7840a42e1e148335e09fe5575dbff64c81414957aea7038e8a4db46", size = 2153252 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/44/40c03bb0f8bddfb9d2beff2ed31641f52d96c287ba881d20e0c074784ac2/youtube_transcript_api-1.0.3-py3-none-any.whl", hash = "sha256:d1874e57de65cf14c9d7d09b2b37c814d6287fa0e770d4922c4cd32a5b3f6c47", size = 2169911 }, +] diff --git a/mcp_clients/Dockerfile.discord b/mcp_clients/Dockerfile.discord deleted file mode 100644 index d64bd67f..00000000 --- a/mcp_clients/Dockerfile.discord +++ /dev/null @@ -1,20 +0,0 @@ -FROM python:3.12-slim - -# Set working directory -WORKDIR /app - -# Install required system dependencies -RUN apt-get update && apt-get install -y --no-install-recommends \ - gcc \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -# Copy necessary files -COPY mcp_clients/ . -RUN pip install --no-cache-dir -r requirements.txt - -# Set environment variable to indicate we're running in Docker -ENV RUNNING_IN_DOCKER=true - -# Command to run the Discord bot -CMD ["python", "discord_bot.py"] \ No newline at end of file diff --git a/mcp_clients/Dockerfile.slack b/mcp_clients/Dockerfile.slack deleted file mode 100644 index 8bae57c9..00000000 --- a/mcp_clients/Dockerfile.slack +++ /dev/null @@ -1,23 +0,0 @@ -FROM python:3.12-slim - -# Set working directory -WORKDIR /app - -# Install required system dependencies -RUN apt-get update && apt-get install -y --no-install-recommends \ - gcc \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -# Copy necessary files -COPY mcp_clients/ . -RUN pip install --no-cache-dir -r requirements.txt - -# Set environment variable to indicate we're running in Docker -ENV RUNNING_IN_DOCKER=true - -# Expose the port the app runs on -EXPOSE 8080 - -# Command to run the Slack bot using Uvicorn -CMD ["python", "slack_bot.py"] \ No newline at end of file diff --git a/mcp_clients/Dockerfile.web b/mcp_clients/Dockerfile.web deleted file mode 100644 index efd5c07c..00000000 --- a/mcp_clients/Dockerfile.web +++ /dev/null @@ -1,23 +0,0 @@ -FROM python:3.12-slim - -# Set working directory -WORKDIR /app - -# Install required system dependencies -RUN apt-get update && apt-get install -y --no-install-recommends \ - gcc \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -# Copy necessary files -COPY mcp_clients/ . -RUN pip install --no-cache-dir -r requirements.txt - -# Set environment variable to indicate we're running in Docker -ENV RUNNING_IN_DOCKER=true - -# Expose the port the app runs on -EXPOSE 8080 - -# Command to run the Web bot using Uvicorn -CMD ["python", "web_bot.py"] \ No newline at end of file diff --git a/mcp_clients/README-Discord.md b/mcp_clients/README-Discord.md deleted file mode 100644 index fc52d356..00000000 --- a/mcp_clients/README-Discord.md +++ /dev/null @@ -1,116 +0,0 @@ -# Klavis AI Discord Bot (MCP Client) - Local Development - -This document provides instructions for setting up and running the Klavis AI Discord Bot locally for development and testing purposes. This bot acts as a client for the Multi-Compute Platform (MCP), allowing users to interact with connected MCP servers and utilize their tools through Discord. - -**Note:** This README is intended for developers or users who want to run the bot on their own machine. For regular use, please invite the official Klavis AI bot available through [www.klavis.ai](https://www.klavis.ai). The local development version runs with `USE_PRODUCTION_DB=False`, which uses local configuration files and might have different behavior or features compared to the hosted production bot (e.g., user verification is skipped). - -## Prerequisites - -* **Python:** Version 3.12 or higher. -* **Docker:** Recommended for easiest setup and execution. ([Docker Desktop](https://www.docker.com/products/docker-desktop/)) -* **Git:** For cloning the repository. -* **Discord Bot Token:** You need to create a Discord application and bot user to get a token. See [Discord Developer Portal](https://discord.com/developers/docs/intro). - -## Setup - -1. **Clone the Repository:** - ```bash - git clone # Replace with the actual URL - cd klavis # Navigate to the root directory of the project - ``` - -2. **Environment Variables:** - * Create a file named `.env` in the root directory (`klavis`). - * Copy the example below and fill in your specific values: - - ```ini - # .env example - DISCORD_TOKEN="YOUR_DISCORD_BOT_TOKEN" - WEBSITE_URL="https://www.klavis.ai/" # Or http://localhost:3000 if running web UI locally - OPENAI_API_KEY="YOUR_OPENAI_API_KEY" # Needed for the default LLM in local mode - - # Optional: Set to true to use production database (NOT recommended for local dev) - # USE_PRODUCTION_DB=False - ``` - * Replace `"YOUR_DISCORD_BOT_TOKEN"` with the token obtained from the Discord Developer Portal. - * Replace `"YOUR_OPENAI_API_KEY"` with your OpenAI API key. Local development mode defaults to using an OpenAI model (`gpt-4o`). - * `WEBSITE_URL` is used for generating login links (though login is bypassed in local mode). Point it to the production site or your local web UI instance. - * `USE_PRODUCTION_DB` defaults to `False` if omitted, which is the correct setting for local development. - -3. **Local MCP Servers Configuration:** - * When running locally (`USE_PRODUCTION_DB=False`), the bot reads the list of MCP server URLs to connect to from `mcp_clients/local_mcp_servers.json`. - * Create this file if it doesn't exist. - * Add the URLs of the MCP servers you want the local bot to connect to. - - ```json - // mcp_clients/local_mcp_servers.json example - { - "server_urls": [ - "http://localhost:8000/sse" - // Add other local or remote MCP server SSE endpoints here - ] - } - ``` - * Replace `http://localhost:8000/sse` with the actual URL of your running MCP server(s). - -## Running the Bot - -You can run the bot using Docker (recommended) or directly with Python in a virtual environment. Make sure you are in the `klavis` root directory. - -### Method 1: Docker (Recommended) - -1. **Build the Docker Image:** - ```bash - docker build -t klavis-discord-bot -f mcp_clients/Dockerfile.discord . - ``` - *(Note: The `.` at the end is important - it specifies the build context as the current directory)* - -2. **Run the Docker Container:** - This command runs the bot using the environment variables from your `.env` file and mounts your local `local_mcp_servers.json` into the container. - ```bash - docker run --rm --env-file .env -v ./mcp_clients/local_mcp_servers.json:/app/local_mcp_servers.json klavis-discord-bot - ``` - * `--rm`: Automatically removes the container when it exits. - * `--env-file .env`: Loads environment variables from the `.env` file in your current directory (`klavis`). - * `-v ./mcp_clients/local_mcp_servers.json:/app/local_mcp_servers.json`: Mounts your local JSON config into the expected path (`/app/local_mcp_servers.json`) inside the container. - -### Method 2: Python Virtual Environment - -1. **Create and Activate Virtual Environment:** - ```bash - # Create environment (only needs to be done once) - python -m venv venv - - # Activate environment - # Windows (Command Prompt/PowerShell): - .\venv\Scripts\activate - # macOS/Linux (bash/zsh): - source venv/bin/activate - ``` - -2. **Install Dependencies:** - ```bash - pip install -r mcp_clients/requirements.txt - ``` - -3. **Run the Bot:** - Ensure your `.env` file exists in the `klavis` root and `mcp_clients/local_mcp_servers.json` is configured. - ```bash - python mcp_clients/discord_bot.py - ``` - -## Usage - -1. **Invite the Bot:** Go to your Discord application in the Developer Portal, navigate to the "OAuth2" > "URL Generator" section. Select the `bot` and `application.commands` scopes. Choose necessary permissions (e.g., `Send Messages`, `Read Message History`, `Embed Links`, `Create Public Threads`). Copy the generated URL and use it to invite the bot to your test server. -2. **Interact:** - * Mention the bot in a channel: `@YourBotName your query here` - * Send a Direct Message (DM) to the bot. - * When first interacting, the bot might send a message about linking your account (using the `WEBSITE_URL`). In local mode (`USE_PRODUCTION_DB=False`), user verification is skipped, so you can proceed to interact with the bot directly. - * The bot will connect to the MCP servers listed in `local_mcp_servers.json`, process your query using the configured LLM (OpenAI by default locally), and potentially use tools from the connected servers to respond. - -## Development Notes - -* The bot uses `asyncio` for asynchronous operations. -* Logging is directed to the standard output/console. -* Key libraries include `discord.py` (Discord interaction), `mcp-client` (MCP communication), `python-dotenv` (environment variables), `openai`/`anthropic` (LLM interaction). -* Refer to `discord_bot.py`, `base_bot.py`, and `mcp_client.py` for core logic. \ No newline at end of file diff --git a/mcp_clients/README.md b/mcp_clients/README.md deleted file mode 100644 index 3fc548e6..00000000 --- a/mcp_clients/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# MCP Clients - -This directory contains the client implementations for interacting with the Multi-Compute Platform (MCP). - -## Available Clients - -* **Discord:** For instructions on setting up and running the Discord bot locally, see [README-Discord.md](./README-Discord.md). -* **Slack:** For instructions on setting up and running the Slack bot locally, see [README-Slack.md](./README-Slack.md). \ No newline at end of file diff --git a/mcp_clients/database b/mcp_clients/database deleted file mode 160000 index 9f9785ff..00000000 --- a/mcp_clients/database +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 9f9785ffd4088a5028bcde21a7f705996c1da079 diff --git a/mcp_clients/llms/__init__.py b/mcp_clients/llms/__init__.py deleted file mode 100644 index 39060b81..00000000 --- a/mcp_clients/llms/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -from .base import ( - BaseLLMConfig, - BaseLLM, - ChatMessage, - LLMMessageFormat, - MessageRole, - TextContent, - ToolCallContent, - ToolResultContent, - FileContent, - ContentType, - Conversation, -) -from .anthropic import Anthropic -from .openai import OpenAI - -__all__ = [ - "BaseLLMConfig", - "BaseLLM", - "ChatMessage", - "LLMMessageFormat", - "MessageRole", - "TextContent", - "ToolCallContent", - "ToolResultContent", - "FileContent", - "ContentType", - "Conversation", - "Anthropic", - "OpenAI", -] diff --git a/mcp_clients/requirements.txt b/mcp_clients/requirements.txt deleted file mode 100644 index 43b95534..00000000 --- a/mcp_clients/requirements.txt +++ /dev/null @@ -1,13 +0,0 @@ -discord.py>=2.3.0 -anthropic>=0.49.0 -python-dotenv>=1.1.0 -anyio>=4.9.0 -trio>=0.22.0 -mcp>=1.6.0 -supabase>=2.15.0 -fastapi>=0.115.12 -uvicorn>=0.34.0 -slack-bolt>=1.23.0 -httpx>=0.28.1 -openai>=1.72.0 -markitdown[all] \ No newline at end of file diff --git a/mcp_servers/.eslintignore b/mcp_servers/.eslintignore new file mode 100644 index 00000000..40ea4547 --- /dev/null +++ b/mcp_servers/.eslintignore @@ -0,0 +1,7 @@ +node_modules/ +dist/ +build/ +*.d.ts +coverage/ +**/*.js +!.eslintrc.js \ No newline at end of file diff --git a/mcp_servers/.eslintrc.js b/mcp_servers/.eslintrc.js new file mode 100644 index 00000000..c4db5ec9 --- /dev/null +++ b/mcp_servers/.eslintrc.js @@ -0,0 +1,45 @@ +// Base ESLint configuration for all MCP servers +module.exports = { + root: true, // This is now the root config + parser: '@typescript-eslint/parser', + parserOptions: { + ecmaVersion: 2022, + sourceType: 'module', + tsconfigRootDir: '.', + project: './tsconfig.json', // Points to the mcp_servers tsconfig + }, + plugins: ['@typescript-eslint', 'prettier'], + extends: [ + 'eslint:recommended', + 'plugin:@typescript-eslint/recommended', + 'prettier', // Avoid conflicts with prettier formatting + 'plugin:prettier/recommended', + ], + env: { + node: true, + es2022: true, + }, + rules: { + // MCP server-specific rules + 'no-console': ['error', { allow: ['warn', 'error'] }], + '@typescript-eslint/explicit-module-boundary-types': 'off', + '@typescript-eslint/no-explicit-any': 'error', + '@typescript-eslint/no-unused-vars': [ + 'error', + { argsIgnorePattern: '^_', varsIgnorePattern: '^_' }, + ], + '@typescript-eslint/no-non-null-assertion': 'warn', + 'no-duplicate-imports': 'error', + 'prettier/prettier': 'error', + }, + overrides: [ + { + files: ['**/*.test.ts', '**/*.spec.ts'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-explicit-any': 'warn', + }, + }, + ], + ignorePatterns: ['node_modules', 'dist', 'build'], +}; \ No newline at end of file diff --git a/mcp_servers/.prettierignore b/mcp_servers/.prettierignore new file mode 100644 index 00000000..2b603878 --- /dev/null +++ b/mcp_servers/.prettierignore @@ -0,0 +1,19 @@ +# Build outputs +dist/ +build/ +out/ + +# Dependencies +node_modules/ + +# Coverage reports +coverage/ + +# Generated files +*.generated.* + +# Misc +.DS_Store +.env +.env.* +*.log \ No newline at end of file diff --git a/mcp_servers/.prettierrc.json b/mcp_servers/.prettierrc.json new file mode 100644 index 00000000..e0c9e0f7 --- /dev/null +++ b/mcp_servers/.prettierrc.json @@ -0,0 +1,8 @@ +{ + "semi": true, + "trailingComma": "all", + "singleQuote": true, + "printWidth": 100, + "tabWidth": 2, + "endOfLine": "lf" +} \ No newline at end of file diff --git a/mcp_servers/README.md b/mcp_servers/README.md new file mode 100644 index 00000000..d99129e2 --- /dev/null +++ b/mcp_servers/README.md @@ -0,0 +1,319 @@ +
+ + + +
+ +

Klavis AI - Production-Ready MCP Servers

+

🐳 Self-Hosted Solutions | 🌐 Hosted MCP Service | šŸ” Enterprise OAuth

+ +
+ +[![Documentation](https://img.shields.io/badge/Documentation-šŸ“–-green)](https://www.klavis.ai/docs) +[![Website](https://img.shields.io/badge/Website-🌐-purple)](https://www.klavis.ai) +[![Discord](https://img.shields.io/badge/Discord-Join-7289DA?logo=discord&logoColor=white)](https://discord.gg/p7TuTEcssn) +[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE) +[![Docker Images](https://img.shields.io/badge/Docker-ghcr.io-blue?logo=docker)](https://github.com/orgs/klavis-ai/packages) + +
+ +## šŸš€ Quick Start - Run Any MCP Server in 30 Seconds + +### 🐳 Using Docker (For Self-Hosting) +[Get Free API Key →](https://www.klavis.ai/home/api-keys) + +```bash +# Run Github MCP Server with OAuth Support through Klavis AI +docker pull ghcr.io/klavis-ai/github-mcp-server:latest +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/github-mcp-server:latest +``` + +```bash +# Or run GitHub MCP Server (manually add token) +docker pull ghcr.io/klavis-ai/github-mcp-server:latest +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"ghp_your_github_token_here"}' \ + ghcr.io/klavis-ai/github-mcp-server:latest +``` + +**Note:** The MCP server runs on port 5000 and exposes the MCP protocol at the `/mcp` path. + +Example running in Cursor +```json +{ + "mcpServers": { + "github": { + "url": "http://localhost:5000/mcp/" + } + } +} +``` + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to 50+ MCP servers with our managed infrastructure - **no setup required** + +[Get Free API Key →](https://www.klavis.ai/home/api-keys) + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="Your-Klavis-API-Key") +server = klavis.mcp_server.create_server_instance("GMAIL", "user123") +``` + +Example running in Cursor + +```json +{ + "mcpServers": { + "klavis-gmail": { + "url": "https://gmail-mcp-server.klavis.ai/mcp/?instance_id=your-instance" + }, + "klavis-github": { + "url": "https://github-mcp-server.klavis.ai/mcp/?instance_id=your-instance" + } + } +} +``` + +**šŸ’” Get your personalized configuration instantly:** + +1. **šŸ”— [Visit our MCP Servers page →](https://www.klavis.ai/home/mcp-servers)** +2. **Select any service** (Gmail, GitHub, Slack, etc.) +3. **Copy the generated configuration** for your tool +4. **Paste into Claude Desktop config** - done! + +## ✨ Enterprise-Grade MCP Infrastructure + +- **🌐 Hosted Service**: Production-ready managed infrastructure with 99.9% uptime SLA +- **šŸ” Enterprise OAuth**: Seamless authentication for Google, GitHub, Slack, Salesforce, etc. +- **šŸ› ļø 50+ Integrations**: CRM, productivity tools, databases, social media, and more +- **šŸš€ Instant Deployment**: Zero-config setup for Claude Desktop, VS Code, Cursor +- **šŸ¢ Enterprise Ready**: SOC2 compliant, GDPR ready, with dedicated support +- **šŸ“– Open Source**: Full source code available for customization and self-hosting + +## šŸŽÆ Self Hosting Instructions + +### 1. 🐳 Docker Images (Fastest Way to Start) + +Perfect for trying out MCP servers or integrating with AI tools like Claude Desktop. + +**Available Images:** +- `ghcr.io/klavis-ai/{server-name}-mcp-server:latest` - Server with OAuth support +- `ghcr.io/klavis-ai/{server-name}-mcp-server:commit-id` - Server builld by selected commit ID + +[**šŸ” Browse All Docker Images →**](https://github.com/orgs/Klavis-AI/packages?repo_name=klavis) + +```bash +# Example: GitHub MCP Server +docker pull ghcr.io/klavis-ai/github-mcp-server:latest +docker run -p 5000:5000 ghcr.io/klavis-ai/github-mcp-server:latest + +# Example: Gmail with OAuth (requires API key) +docker pull ghcr.io/klavis-ai/gmail-mcp-server:latest +docker run -it -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/gmail-mcp-server:latest +``` + +[**šŸ”— Get Free API Key →**](https://www.klavis.ai/home/api-keys) + +### 2. šŸ—ļø Build from Source + +Clone and run any MCP server locally (with or without Docker): + +```bash +git clone https://github.com/klavis-ai/klavis.git +cd klavis/mcp_servers/github + +# Option A: Using Docker +docker build -t github-mcp . +docker run -p 5000:5000 github-mcp + +# Option B: Run directly (Go example) +go mod download +go run server.go + +# Option C: Python servers +cd ../youtube +pip install -r requirements.txt +python server.py + +# Option D: Node.js servers +cd ../slack +npm install +npm start +``` + +Each server includes detailed setup instructions in its individual README. + +Use our managed infrastructure - no Docker required: + +```bash +pip install klavis # or npm install klavis +``` + +## šŸ› ļø Available MCP Servers + +| Service | Docker Image | OAuth Required | Description | +|---------|--------------|----------------|-------------| +| **GitHub** | `ghcr.io/klavis-ai/github-mcp-server` | āœ… | Repository management, issues, PRs | +| **Gmail** | `ghcr.io/klavis-ai/gmail-mcp-server:latest` | āœ… | Email reading, sending, management | +| **Google Sheets** | `ghcr.io/klavis-ai/google_sheets-mcp-server:latest` | āœ… | Spreadsheet operations | +| **YouTube** | `ghcr.io/klavis-ai/youtube-mcp-server` | āŒ | Video information, search | +| **Slack** | `ghcr.io/klavis-ai/slack-mcp-server:latest` | āœ… | Channel management, messaging | +| **Notion** | `ghcr.io/klavis-ai/notion-mcp-server:latest` | āœ… | Database and page operations | +| **Salesforce** | `ghcr.io/klavis-ai/salesforce-mcp-server:latest` | āœ… | CRM data management | +| **Postgres** | `ghcr.io/klavis-ai/postgres-mcp-server` | āŒ | Database operations | +| ... | ... | ...| ... | + +And more! +[**šŸ” View All 50+ Servers →**](https://www.klavis.ai/docs/introduction#mcp-server-quickstart) | [**🐳 Browse Docker Images →**](https://github.com/orgs/Klavis-AI/packages?repo_name=klavis) + +## šŸ’” Usage Examples + +For existing MCP implementations: + +**Python** +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-key") +server = klavis.mcp_server.create_server_instance( + server_name="YOUTUBE", + user_id="user123" +) +``` + +**TypeScript** +```typescript +import { KlavisClient } from 'klavis'; + +const klavis = new KlavisClient({ apiKey: 'your-key' }); +const server = await klavis.mcpServer.createServerInstance({ + serverName: "Gmail", + userId: "user123" +}); +``` + +### With AI Frameworks + +**OpenAI Function Calling** +```python +from openai import OpenAI +from klavis import Klavis + +klavis = Klavis(api_key="your-key") +openai = OpenAI(api_key="your-openai-key") + +# Create server and get tools +server = klavis.mcp_server.create_server_instance("YOUTUBE", "user123") +tools = klavis.mcp_server.list_tools(server.server_url, format="OPENAI") + +# Use with OpenAI +response = openai.chat.completions.create( + model="gpt-4o-mini", + messages=[{"role": "user", "content": "Summarize this video: https://..."}], + tools=tools.tools +) +``` + +[**šŸ“– View Complete Examples →**](examples/) + +## 🌐 Hosted MCP Service - Zero Setup Required + +**Perfect for individuals and businesses who want instant access without infrastructure complexity:** + +### ✨ **Why Choose Our Hosted Service:** +- **šŸš€ Instant Setup**: Get any MCP server running in 30 seconds +- **šŸ” OAuth Handled**: No complex authentication setup required +- **šŸ—ļø No Infrastructure**: Everything runs on our secure, scalable cloud +- **šŸ“ˆ Auto-Scaling**: From prototype to production seamlessly +- **šŸ”„ Always Updated**: Latest MCP server versions automatically +- **šŸ’° Cost-Effective**: Pay only for what you use, free tier available + +### šŸ’» **Quick Integration:** + +```python +from klavis import Klavis + +# Get started with just an API key +klavis = Klavis(api_key="Your-Klavis-API-Key") + +# Create any MCP server instantly +gmail_server = klavis.mcp_server.create_server_instance( + server_name="GMAIL", + user_id="your-user-id" +) + +# Server is ready to use immediately +print(f"Gmail MCP server ready: {gmail_server.server_url}") +``` + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** | **šŸ“– [Complete Documentation →](https://www.klavis.ai/docs)** + +## šŸ” OAuth Authentication (For OAuth-Enabled Servers) + +Some servers require OAuth authentication (Google, GitHub, Slack, etc.). OAuth implementation requires significant setup and code complexity: + +```bash +# Run with OAuth support (requires free API key) +docker pull ghcr.io/klavis-ai/gmail-mcp-server:latest +docker run -it -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/gmail-mcp-server:latest + +# Follow the displayed URL to authenticate +# Server starts automatically after authentication +``` + +**Why OAuth needs additional implementation?** +- šŸ”§ **Complex Setup**: Each service requires creating OAuth apps with specific redirect URLs, scopes, and credentials +- šŸ“ **Implementation Overhead**: OAuth 2.0 flow requires callback handling, token refresh, and secure storage +- šŸ”‘ **Credential Management**: Managing multiple OAuth app secrets across different services +- šŸ”„ **Token Lifecycle**: Handling token expiration, refresh, and error cases + +Our OAuth wrapper simplifies this by handling all the complex OAuth implementation details, so you can focus on using the MCP servers directly. + +**Alternative**: For advanced users, you can implement OAuth yourself by creating apps with each service provider. Check individual server READMEs for technical details. + +## šŸ“š Resources & Community + +| Resource | Link | Description | +|----------|------|-------------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | Complete guides and API reference | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | Get help and connect with users | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | Report bugs and request features | +| **šŸ“¦ Examples** | [examples/](examples/) | Working examples with popular AI frameworks | +| **šŸ”§ Server Guides** | [mcp_servers/](mcp_servers/) | Individual server documentation | + +## šŸ¤ Contributing + +We love contributions! Whether you want to: +- šŸ› Report bugs or request features +- šŸ“ Improve documentation +- šŸ”§ Build new MCP servers +- šŸŽØ Enhance existing servers + +Check out our [Contributing Guide](CONTRIBUTING.md) to get started! + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord • + Examples +

+
diff --git a/mcp_servers/affinity/.env.example b/mcp_servers/affinity/.env.example new file mode 100644 index 00000000..863c416d --- /dev/null +++ b/mcp_servers/affinity/.env.example @@ -0,0 +1,2 @@ +# Port for the MCP server to listen on +AFFINITY_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/affinity/Dockerfile b/mcp_servers/affinity/Dockerfile new file mode 100644 index 00000000..397c47ca --- /dev/null +++ b/mcp_servers/affinity/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/affinity/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/affinity/server.py . +COPY mcp_servers/affinity/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/affinity/README.md b/mcp_servers/affinity/README.md new file mode 100644 index 00000000..8d226e7a --- /dev/null +++ b/mcp_servers/affinity/README.md @@ -0,0 +1,78 @@ +# Affinity MCP Server + +A Model Context Protocol (MCP) server for Affinity CRM integration. Manage relationships, deals, and business networks using Affinity's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Affinity with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("AFFINITY", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/affinity-mcp-server:latest + + +# Run Affinity MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/affinity-mcp-server:latest + + +# Run Affinity MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_affinity_api_key_here"}' \ + ghcr.io/klavis-ai/affinity-mcp-server:latest +``` + +**OAuth Setup:** Affinity requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Relationship Management**: Manage people, organizations, and relationships +- **Deal Tracking**: Handle opportunities and deal pipeline +- **List Operations**: Create and manage custom lists and fields +- **Network Analysis**: Analyze relationship networks and connections +- **Data Sync**: Sync data with external systems and integrations + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/affinity/requirements.txt b/mcp_servers/affinity/requirements.txt new file mode 100644 index 00000000..81aab075 --- /dev/null +++ b/mcp_servers/affinity/requirements.txt @@ -0,0 +1,9 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +httpx +click +starlette \ No newline at end of file diff --git a/mcp_servers/affinity/server.py b/mcp_servers/affinity/server.py new file mode 100644 index 00000000..6f73b468 --- /dev/null +++ b/mcp_servers/affinity/server.py @@ -0,0 +1,1420 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + auth_token_context, + get_current_user, + get_all_list_entries_on_a_list, get_metadata_on_all_lists, get_metadata_on_a_single_list, get_metadata_on_a_single_list_fields, get_a_single_list_entry_on_a_list, + get_all_persons, get_single_person, get_person_fields_metadata, get_person_lists, get_person_list_entries, search_persons, + get_all_companies, get_single_company, get_company_fields_metadata, get_company_lists, get_company_list_entries, search_organizations, + get_all_opportunities, get_single_opportunity, search_opportunities, + get_all_notes, get_specific_note +) + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +AFFINITY_MCP_SERVER_PORT = int(os.getenv("AFFINITY_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token') or auth_json.get('api_key') or auth_json.get('token') or '' + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +@click.command() +@click.option("--port", default=AFFINITY_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("affinity-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # Authentication + types.Tool( + name="affinity_get_current_user", + description="Get current user information from Affinity.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_USER", "readOnlyHint": True} + ), + ), + # Lists + types.Tool( + name="affinity_get_all_list_entries_on_a_list", + description="Get all List Entries on a List.", + inputSchema={ + "type": "object", + "required": ["list_id"], + "properties": { + "list_id": { + "type": "integer", + "description": "The ID of the list.", + }, + "cursor": { + "type": "string", + "description": "Cursor for pagination.", + }, + "limit": { + "type": "integer", + "description": "Number of items per page (1-100, default 100).", + }, + "field_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "Field IDs for field data.", + }, + "field_types": { + "type": "array", + "items": {"type": "string"}, + "description": "Field types (enriched, global, list, relationship-intelligence).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_LIST", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_metadata_on_all_lists", + description="Get metadata on all Lists.", + inputSchema={ + "type": "object", + "properties": { + "cursor": { + "type": "string", + "description": "Cursor for pagination.", + }, + "limit": { + "type": "integer", + "description": "Number of items per page (1-100, default 100).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_LIST", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_metadata_on_a_single_list", + description="Get metadata on a single List.", + inputSchema={ + "type": "object", + "required": ["list_id"], + "properties": { + "list_id": { + "type": "integer", + "description": "The ID of the list.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_LIST", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_metadata_on_a_single_list_fields", + description="Get metadata on a single List's Fields.", + inputSchema={ + "type": "object", + "required": ["list_id"], + "properties": { + "list_id": { + "type": "integer", + "description": "The ID of the list.", + }, + "cursor": { + "type": "string", + "description": "Cursor for pagination.", + }, + "limit": { + "type": "integer", + "description": "Number of items per page (1-100, default 100).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_LIST", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_a_single_list_entry_on_a_list", + description="Get a single List Entry on a List.", + inputSchema={ + "type": "object", + "required": ["list_id", "list_entry_id"], + "properties": { + "list_id": { + "type": "integer", + "description": "The ID of the list.", + }, + "list_entry_id": { + "type": "integer", + "description": "The ID of the list entry.", + }, + "field_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "Field IDs for field data.", + }, + "field_types": { + "type": "array", + "items": {"type": "string"}, + "description": "Field types (enriched, global, list, relationship-intelligence).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_LIST", "readOnlyHint": True} + ), + ), + # Persons + types.Tool( + name="affinity_get_all_persons", + description="Get all Persons in Affinity.", + inputSchema={ + "type": "object", + "properties": { + "cursor": { + "type": "string", + "description": "Cursor for pagination.", + }, + "limit": { + "type": "integer", + "description": "Number of items per page (1-100, default 100).", + }, + "ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "Person IDs to filter by.", + }, + "field_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "Field IDs for field data.", + }, + "field_types": { + "type": "array", + "items": {"type": "string"}, + "description": "Field types (enriched, global, relationship-intelligence).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_PERSON", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_single_person", + description="Get a single Person by ID.", + inputSchema={ + "type": "object", + "required": ["person_id"], + "properties": { + "person_id": { + "type": "integer", + "description": "The ID of the person to retrieve.", + }, + "field_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "Field IDs for field data.", + }, + "field_types": { + "type": "array", + "items": {"type": "string"}, + "description": "Field types (enriched, global, relationship-intelligence).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_PERSON", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_person_fields_metadata", + description="Get metadata on Person Fields.", + inputSchema={ + "type": "object", + "properties": { + "cursor": { + "type": "string", + "description": "Cursor for pagination.", + }, + "limit": { + "type": "integer", + "description": "Number of items per page (1-100, default 100).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_PERSON", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_person_lists", + description="Get a Person's Lists.", + inputSchema={ + "type": "object", + "required": ["person_id"], + "properties": { + "person_id": { + "type": "integer", + "description": "The ID of the person.", + }, + "cursor": { + "type": "string", + "description": "Cursor for pagination.", + }, + "limit": { + "type": "integer", + "description": "Number of items per page (1-100, default 100).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_PERSON", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_person_list_entries", + description="Get a Person's List Entries.", + inputSchema={ + "type": "object", + "required": ["person_id"], + "properties": { + "person_id": { + "type": "integer", + "description": "The ID of the person.", + }, + "cursor": { + "type": "string", + "description": "Cursor for pagination.", + }, + "limit": { + "type": "integer", + "description": "Number of items per page (1-100, default 100).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_PERSON", "readOnlyHint": True} + ), + ), + # Companies + types.Tool( + name="affinity_get_all_companies", + description="Get all Companies in Affinity with basic information and field data.", + inputSchema={ + "type": "object", + "properties": { + "cursor": { + "type": "string", + "description": "Cursor for pagination.", + }, + "limit": { + "type": "integer", + "description": "Number of items per page (1-100, default 100).", + }, + "ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "Company IDs to filter by.", + }, + "field_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "Field IDs for field data.", + }, + "field_types": { + "type": "array", + "items": {"type": "string"}, + "description": "Field types (enriched, global, relationship-intelligence).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_COMPANY", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_single_company", + description="Get a single Company by ID with basic information and field data.", + inputSchema={ + "type": "object", + "required": ["company_id"], + "properties": { + "company_id": { + "type": "integer", + "description": "The ID of the company to retrieve.", + }, + "field_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "Field IDs for field data.", + }, + "field_types": { + "type": "array", + "items": {"type": "string"}, + "description": "Field types (enriched, global, relationship-intelligence).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_COMPANY", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_company_fields_metadata", + description="Get metadata on Company Fields.", + inputSchema={ + "type": "object", + "properties": { + "cursor": { + "type": "string", + "description": "Cursor for pagination.", + }, + "limit": { + "type": "integer", + "description": "Number of items per page (1-100, default 100).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_COMPANY", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_company_lists", + description="Get all Lists that contain the specified Company.", + inputSchema={ + "type": "object", + "required": ["company_id"], + "properties": { + "company_id": { + "type": "integer", + "description": "The ID of the company.", + }, + "cursor": { + "type": "string", + "description": "Cursor for pagination.", + }, + "limit": { + "type": "integer", + "description": "Number of items per page (1-100, default 100).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_COMPANY", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_company_list_entries", + description="Get List Entries for a Company across all Lists with field data.", + inputSchema={ + "type": "object", + "required": ["company_id"], + "properties": { + "company_id": { + "type": "integer", + "description": "The ID of the company.", + }, + "cursor": { + "type": "string", + "description": "Cursor for pagination.", + }, + "limit": { + "type": "integer", + "description": "Number of items per page (1-100, default 100).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_COMPANY", "readOnlyHint": True} + ), + ), + # Opportunities + types.Tool( + name="affinity_get_all_opportunities", + description="Get all Opportunities in Affinity.", + inputSchema={ + "type": "object", + "properties": { + "cursor": { + "type": "string", + "description": "Cursor for pagination.", + }, + "limit": { + "type": "integer", + "description": "Number of items per page (1-100, default 100).", + }, + "ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "Opportunity IDs to filter by.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_OPPORTUNITY", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_single_opportunity", + description="Get a single Opportunity by ID.", + inputSchema={ + "type": "object", + "required": ["opportunity_id"], + "properties": { + "opportunity_id": { + "type": "integer", + "description": "The ID of the opportunity to retrieve.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_OPPORTUNITY", "readOnlyHint": True} + ), + ), + # Search Tools + types.Tool( + name="affinity_search_persons", + description="Search for persons in Affinity. Search term can be part of an email address, first name, or last name.", + inputSchema={ + "type": "object", + "properties": { + "term": { + "type": "string", + "description": "Search term for finding persons (email, first name, or last name).", + }, + "with_interaction_dates": { + "type": "boolean", + "description": "Include interaction dates in the response.", + }, + "with_interaction_persons": { + "type": "boolean", + "description": "Include persons for each interaction.", + }, + "with_opportunities": { + "type": "boolean", + "description": "Include opportunity IDs for each person.", + }, + "with_current_organizations": { + "type": "boolean", + "description": "Include current organization IDs for each person.", + }, + "page_size": { + "type": "integer", + "description": "Number of results per page (default 500).", + }, + "page_token": { + "type": "string", + "description": "Token for pagination to get next page of results.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_PERSON", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_search_organizations", + description="Search for organizations / companies in Affinity. Search term can be part of organization name or domain.", + inputSchema={ + "type": "object", + "properties": { + "term": { + "type": "string", + "description": "Search term for finding organizations / companies (name or domain).", + }, + "with_interaction_dates": { + "type": "boolean", + "description": "Include interaction dates in the response.", + }, + "with_interaction_persons": { + "type": "boolean", + "description": "Include persons for each interaction.", + }, + "with_opportunities": { + "type": "boolean", + "description": "Include opportunity IDs for each organization.", + }, + "page_size": { + "type": "integer", + "description": "Number of results per page (default 500).", + }, + "page_token": { + "type": "string", + "description": "Token for pagination to get next page of results.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_ORGANIZATION", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_search_opportunities", + description="Search for opportunities in Affinity. Search term can be part of opportunity name.", + inputSchema={ + "type": "object", + "properties": { + "term": { + "type": "string", + "description": "Search term for finding opportunities (name).", + }, + "page_size": { + "type": "integer", + "description": "Number of results per page (default 500).", + }, + "page_token": { + "type": "string", + "description": "Token for pagination to get next page of results.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_OPPORTUNITY", "readOnlyHint": True} + ), + ), + # Notes + types.Tool( + name="affinity_get_all_notes", + description="Get all Notes in Affinity.", + inputSchema={ + "type": "object", + "properties": { + "person_id": { + "type": "integer", + "description": "Filter by person ID", + }, + "organization_id": { + "type": "integer", + "description": "Filter by organization ID", + }, + "opportunity_id": { + "type": "integer", + "description": "Filter by opportunity ID", + }, + "page_size": { + "type": "integer", + "description": "Number of items per page", + }, + "page_token": { + "type": "string", + "description": "Token for pagination", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_NOTE", "readOnlyHint": True} + ), + ), + types.Tool( + name="affinity_get_specific_note", + description="Get a specific note by ID.", + inputSchema={ + "type": "object", + "required": ["note_id"], + "properties": { + "note_id": { + "type": "integer", + "description": "Note ID", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "AFFINITY_NOTE", "readOnlyHint": True} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + # Auth + if name == "affinity_get_current_user": + try: + result = await get_current_user() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Lists + elif name == "affinity_get_all_list_entries_on_a_list": + list_id = arguments.get("list_id") + if not list_id: + return [ + types.TextContent( + type="text", + text="Error: list_id parameter is required", + ) + ] + + cursor = arguments.get("cursor") + limit = arguments.get("limit") + field_ids = arguments.get("field_ids") + field_types = arguments.get("field_types") + + try: + result = await get_all_list_entries_on_a_list(list_id, cursor, limit, field_ids, field_types) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_metadata_on_all_lists": + cursor = arguments.get("cursor") + limit = arguments.get("limit") + + try: + result = await get_metadata_on_all_lists(cursor, limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_metadata_on_a_single_list": + list_id = arguments.get("list_id") + if not list_id: + return [ + types.TextContent( + type="text", + text="Error: list_id parameter is required", + ) + ] + + try: + result = await get_metadata_on_a_single_list(list_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_metadata_on_a_single_list_fields": + list_id = arguments.get("list_id") + if not list_id: + return [ + types.TextContent( + type="text", + text="Error: list_id parameter is required", + ) + ] + + cursor = arguments.get("cursor") + limit = arguments.get("limit") + + try: + result = await get_metadata_on_a_single_list_fields(list_id, cursor, limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_a_single_list_entry_on_a_list": + list_id = arguments.get("list_id") + list_entry_id = arguments.get("list_entry_id") + if not list_id or not list_entry_id: + return [ + types.TextContent( + type="text", + text="Error: list_id and list_entry_id parameters are required", + ) + ] + + field_ids = arguments.get("field_ids") + field_types = arguments.get("field_types") + + try: + result = await get_a_single_list_entry_on_a_list(list_id, list_entry_id, field_ids, field_types) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Persons + elif name == "affinity_get_all_persons": + cursor = arguments.get("cursor") + limit = arguments.get("limit") + ids = arguments.get("ids") + field_ids = arguments.get("field_ids") + field_types = arguments.get("field_types") + + try: + result = await get_all_persons(cursor, limit, ids, field_ids, field_types) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_single_person": + person_id = arguments.get("person_id") + if not person_id: + return [ + types.TextContent( + type="text", + text="Error: person_id parameter is required", + ) + ] + + field_ids = arguments.get("field_ids") + field_types = arguments.get("field_types") + + try: + result = await get_single_person(person_id, field_ids, field_types) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_person_fields_metadata": + cursor = arguments.get("cursor") + limit = arguments.get("limit") + + try: + result = await get_person_fields_metadata(cursor, limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_person_lists": + person_id = arguments.get("person_id") + if not person_id: + return [ + types.TextContent( + type="text", + text="Error: person_id parameter is required", + ) + ] + + cursor = arguments.get("cursor") + limit = arguments.get("limit") + + try: + result = await get_person_lists(person_id, cursor, limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_person_list_entries": + person_id = arguments.get("person_id") + if not person_id: + return [ + types.TextContent( + type="text", + text="Error: person_id parameter is required", + ) + ] + + cursor = arguments.get("cursor") + limit = arguments.get("limit") + + try: + result = await get_person_list_entries(person_id, cursor, limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Companies + elif name == "affinity_get_all_companies": + cursor = arguments.get("cursor") + limit = arguments.get("limit") + ids = arguments.get("ids") + field_ids = arguments.get("field_ids") + field_types = arguments.get("field_types") + + try: + result = await get_all_companies(cursor, limit, ids, field_ids, field_types) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_single_company": + company_id = arguments.get("company_id") + if not company_id: + return [ + types.TextContent( + type="text", + text="Error: company_id parameter is required", + ) + ] + + field_ids = arguments.get("field_ids") + field_types = arguments.get("field_types") + + try: + result = await get_single_company(company_id, field_ids, field_types) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_company_fields_metadata": + cursor = arguments.get("cursor") + limit = arguments.get("limit") + + try: + result = await get_company_fields_metadata(cursor, limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_company_lists": + company_id = arguments.get("company_id") + if not company_id: + return [ + types.TextContent( + type="text", + text="Error: company_id parameter is required", + ) + ] + + cursor = arguments.get("cursor") + limit = arguments.get("limit") + + try: + result = await get_company_lists(company_id, cursor, limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_company_list_entries": + company_id = arguments.get("company_id") + if not company_id: + return [ + types.TextContent( + type="text", + text="Error: company_id parameter is required", + ) + ] + + cursor = arguments.get("cursor") + limit = arguments.get("limit") + + try: + result = await get_company_list_entries(company_id, cursor, limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + + # Opportunities + elif name == "affinity_get_all_opportunities": + cursor = arguments.get("cursor") + limit = arguments.get("limit") + ids = arguments.get("ids") + + try: + result = await get_all_opportunities(cursor, limit, ids) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_single_opportunity": + opportunity_id = arguments.get("opportunity_id") + if not opportunity_id: + return [ + types.TextContent( + type="text", + text="Error: opportunity_id parameter is required", + ) + ] + try: + result = await get_single_opportunity(opportunity_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Search Tools + elif name == "affinity_search_persons": + term = arguments.get("term") + with_interaction_dates = arguments.get("with_interaction_dates") + with_interaction_persons = arguments.get("with_interaction_persons") + with_opportunities = arguments.get("with_opportunities") + with_current_organizations = arguments.get("with_current_organizations") + page_size = arguments.get("page_size") + page_token = arguments.get("page_token") + + try: + result = await search_persons(term, with_interaction_dates, with_interaction_persons, with_opportunities, with_current_organizations, page_size, page_token) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_search_organizations": + term = arguments.get("term") + with_interaction_dates = arguments.get("with_interaction_dates") + with_interaction_persons = arguments.get("with_interaction_persons") + with_opportunities = arguments.get("with_opportunities") + page_size = arguments.get("page_size") + page_token = arguments.get("page_token") + + try: + result = await search_organizations(term, with_interaction_dates, with_interaction_persons, with_opportunities, page_size, page_token) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_search_opportunities": + term = arguments.get("term") + page_size = arguments.get("page_size") + page_token = arguments.get("page_token") + + try: + result = await search_opportunities(term, page_size, page_token) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Notes + elif name == "affinity_get_all_notes": + person_id = arguments.get("person_id") + organization_id = arguments.get("organization_id") + opportunity_id = arguments.get("opportunity_id") + page_size = arguments.get("page_size") + page_token = arguments.get("page_token") + + try: + result = await get_all_notes(person_id, organization_id, opportunity_id, page_size, page_token) + + if isinstance(result, dict) and "error" in result: + return [ + types.TextContent( + type="text", + text=f"API Error: {result.get('error', 'Unknown error occurred')}", + ) + ] + + if isinstance(result, dict) and "data" in result and result.get("data") is None: + return [ + types.TextContent( + type="text", + text="No notes found for the specified criteria. The API returned an empty result.", + ) + ] + + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "affinity_get_specific_note": + note_id = arguments.get("note_id") + if not note_id: + return [ + types.TextContent( + type="text", + text="Error: note_id parameter is required", + ) + ] + try: + result = await get_specific_note(note_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + + else: + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/affinity/tools/__init__.py b/mcp_servers/affinity/tools/__init__.py new file mode 100644 index 00000000..bca5200a --- /dev/null +++ b/mcp_servers/affinity/tools/__init__.py @@ -0,0 +1,53 @@ +from .auth import get_current_user +from .lists import ( + get_all_list_entries_on_a_list, + get_metadata_on_all_lists, + get_metadata_on_a_single_list, + get_metadata_on_a_single_list_fields, + get_a_single_list_entry_on_a_list +) +from .persons import get_all_persons, get_single_person, get_person_fields_metadata, get_person_lists, get_person_list_entries, search_persons +from .companies import get_all_companies, get_single_company, get_company_fields_metadata, get_company_lists, get_company_list_entries, search_organizations +from .opportunities import get_all_opportunities, get_single_opportunity, search_opportunities +from .notes import get_all_notes, get_specific_note +from .base import auth_token_context + +__all__ = [ + # Auth + "get_current_user", + + # Lists + "get_all_list_entries_on_a_list", + "get_metadata_on_all_lists", + "get_metadata_on_a_single_list", + "get_metadata_on_a_single_list_fields", + "get_a_single_list_entry_on_a_list", + + # Persons + "get_all_persons", + "get_single_person", + "get_person_fields_metadata", + "get_person_lists", + "get_person_list_entries", + "search_persons", + + # Companies + "get_all_companies", + "get_single_company", + "get_company_fields_metadata", + "get_company_lists", + "get_company_list_entries", + "search_organizations", + + # Opportunities + "get_all_opportunities", + "get_single_opportunity", + "search_opportunities", + + # Notes + "get_all_notes", + "get_specific_note", + + # Base + "auth_token_context", +] \ No newline at end of file diff --git a/mcp_servers/affinity/tools/auth.py b/mcp_servers/affinity/tools/auth.py new file mode 100644 index 00000000..8169a770 --- /dev/null +++ b/mcp_servers/affinity/tools/auth.py @@ -0,0 +1,15 @@ +import logging +from typing import Any, Dict +from .base import make_v2_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_current_user() -> Dict[str, Any]: + """Get current user information from Affinity V2 API.""" + logger.info("Executing tool: get_current_user") + try: + return await make_v2_request("GET", "/auth/whoami") + except Exception as e: + logger.exception(f"Error executing tool get_current_user: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/affinity/tools/base.py b/mcp_servers/affinity/tools/base.py new file mode 100644 index 00000000..11126230 --- /dev/null +++ b/mcp_servers/affinity/tools/base.py @@ -0,0 +1,149 @@ +import logging +from typing import Any, Dict, Optional +from contextvars import ContextVar +import httpx + +# Configure logging +logger = logging.getLogger(__name__) + +AFFINITY_API_ENDPOINT = "https://api.affinity.co/" + +# Context variable to store the API key for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("Authentication token not found in request context") + +class AffinityV1Client: + """Client for Affinity API V1 using Basic Authentication.""" + + @staticmethod + async def make_request( + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + """Make an HTTP request to Affinity V1 API.""" + api_key = get_auth_token() + + if not api_key: + raise RuntimeError("No API key provided. Please set the x-auth-token header.") + + # V1 uses HTTP Basic Auth with API key + auth = httpx.BasicAuth("", api_key) + + headers = { + "Content-Type": "application/json" + } + + # V1 endpoints don't have /v1 prefix + url = f"{AFFINITY_API_ENDPOINT}{endpoint}" + + async with httpx.AsyncClient() as client: + if method.upper() == "GET": + response = await client.get(url, auth=auth, headers=headers, params=params) + elif method.upper() == "POST": + response = await client.post(url, auth=auth, headers=headers, json=data) + elif method.upper() == "PUT": + response = await client.put(url, auth=auth, headers=headers, json=data) + elif method.upper() == "DELETE": + response = await client.delete(url, auth=auth, headers=headers) + else: + raise ValueError(f"Unsupported HTTP method: {method}") + + response.raise_for_status() + + # Handle empty responses for DELETE operations + if response.status_code == 204 or not response.content: + return {"success": True} + + try: + json_response = response.json() + # Handle null/undefined responses + if json_response is None: + return {"data": None, "message": "API returned null response"} + return json_response + except ValueError as e: + # Handle cases where response content exists but isn't valid JSON + logger.error(f"Failed to parse JSON response: {e}") + logger.error(f"Response content: {response.content}") + return {"error": "Invalid JSON response", "content": response.text} + +class AffinityV2Client: + """Client for Affinity API V2 using Bearer Authentication.""" + + @staticmethod + async def make_request( + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + """Make an HTTP request to Affinity V2 API.""" + api_key = get_auth_token() + + if not api_key: + raise RuntimeError("No API key provided. Please set the x-auth-token header.") + + # note: v2 uses Bearer Authentication + headers = { + "Authorization": f"Bearer {api_key}", + "Content-Type": "application/json" + } + + url = f"{AFFINITY_API_ENDPOINT}/v2{endpoint}" + + async with httpx.AsyncClient() as client: + if method.upper() == "GET": + response = await client.get(url, headers=headers, params=params) + elif method.upper() == "POST": + response = await client.post(url, headers=headers, json=data) + elif method.upper() == "PUT": + response = await client.put(url, headers=headers, json=data) + elif method.upper() == "PATCH": + response = await client.patch(url, headers=headers, json=data) + elif method.upper() == "DELETE": + response = await client.delete(url, headers=headers) + else: + raise ValueError(f"Unsupported HTTP method: {method}") + + response.raise_for_status() + + # Handle empty responses for DELETE operations + if response.status_code == 204 or not response.content: + return {"success": True} + + try: + json_response = response.json() + # Handle null/undefined responses + if json_response is None: + return {"data": None, "message": "API returned null response"} + return json_response + except ValueError as e: + # Handle cases where response content exists but isn't valid JSON + logger.error(f"Failed to parse JSON response: {e}") + logger.error(f"Response content: {response.content}") + return {"error": "Invalid JSON response", "content": response.text} + +async def make_http_request( + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """Make an HTTP request to Affinity V1 API.""" + return await AffinityV1Client.make_request(method, endpoint, data, params) + +async def make_v2_request( + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """Make an HTTP request to Affinity V2 API.""" + return await AffinityV2Client.make_request(method, endpoint, data, params) \ No newline at end of file diff --git a/mcp_servers/affinity/tools/companies.py b/mcp_servers/affinity/tools/companies.py new file mode 100644 index 00000000..a58e94d3 --- /dev/null +++ b/mcp_servers/affinity/tools/companies.py @@ -0,0 +1,181 @@ +import logging +from typing import Any, Dict, Optional, List +from .base import make_v2_request, make_http_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_all_companies( + cursor: Optional[str] = None, + limit: Optional[int] = None, + ids: Optional[List[int]] = None, + field_ids: Optional[List[str]] = None, + field_types: Optional[List[str]] = None +) -> Dict[str, Any]: + """Get all Companies in Affinity with basic information and field data. + + Args: + cursor: Cursor for pagination + limit: Number of items per page (1-100, default 100) + ids: Company IDs to filter by + field_ids: Field IDs for field data + field_types: Field types (enriched, global, relationship-intelligence) + """ + logger.info("Executing tool: get_all_companies") + try: + params = {} + if cursor: + params["cursor"] = cursor + if limit: + params["limit"] = limit + if ids: + params["ids"] = ids + if field_ids: + params["fieldIds"] = field_ids + if field_types: + params["fieldTypes"] = field_types + + return await make_v2_request("GET", "/companies", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_all_companies: {e}") + raise e + +async def get_single_company( + company_id: int, + field_ids: Optional[List[str]] = None, + field_types: Optional[List[str]] = None +) -> Dict[str, Any]: + """Get a single Company by ID with basic information and field data. + + Args: + company_id: Company ID + field_ids: Field IDs for field data + field_types: Field types (enriched, global, relationship-intelligence) + """ + logger.info(f"Executing tool: get_single_company with company_id: {company_id}") + try: + params = {} + if field_ids: + params["fieldIds"] = field_ids + if field_types: + params["fieldTypes"] = field_types + + return await make_v2_request("GET", f"/companies/{company_id}", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_single_company: {e}") + raise e + +async def get_company_fields_metadata( + cursor: Optional[str] = None, + limit: Optional[int] = None +) -> Dict[str, Any]: + """Get metadata on Company Fields. + + Args: + cursor: Cursor for pagination + limit: Number of items per page (1-100, default 100) + """ + logger.info("Executing tool: get_company_fields_metadata") + try: + params = {} + if cursor: + params["cursor"] = cursor + if limit: + params["limit"] = limit + + return await make_v2_request("GET", "/companies/fields", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_company_fields_metadata: {e}") + raise e + +async def get_company_lists( + company_id: int, + cursor: Optional[str] = None, + limit: Optional[int] = None +) -> Dict[str, Any]: + """Get all Lists that contain the specified Company. + + Args: + company_id: Company ID + cursor: Cursor for pagination + limit: Number of items per page (1-100, default 100) + """ + logger.info(f"Executing tool: get_company_lists with company_id: {company_id}") + try: + params = {} + if cursor: + params["cursor"] = cursor + if limit: + params["limit"] = limit + + return await make_v2_request("GET", f"/companies/{company_id}/lists", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_company_lists: {e}") + raise e + +async def get_company_list_entries( + company_id: int, + cursor: Optional[str] = None, + limit: Optional[int] = None +) -> Dict[str, Any]: + """Get List Entries for a Company across all Lists with field data. + + Args: + company_id: Company ID + cursor: Cursor for pagination + limit: Number of items per page (1-100, default 100) + """ + logger.info(f"Executing tool: get_company_list_entries with company_id: {company_id}") + try: + params = {} + if cursor: + params["cursor"] = cursor + if limit: + params["limit"] = limit + + return await make_v2_request("GET", f"/companies/{company_id}/list-entries", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_company_list_entries: {e}") + raise e + +async def search_organizations( + term: Optional[str] = None, + with_interaction_dates: Optional[bool] = None, + with_interaction_persons: Optional[bool] = None, + with_opportunities: Optional[bool] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None +) -> Dict[str, Any]: + """Search for organizations / companies in Affinity. + + Searches your team's data and fetches all the organizations that meet the search criteria. + The search term can be part of an organization name or domain. + + Args: + term: A string used to search all the organizations in your team's data + with_interaction_dates: When true, interaction dates will be present on the returned resources + with_interaction_persons: When true, persons for each interaction will be returned + with_opportunities: When true, opportunity IDs will be returned for each organization + page_size: How many results to return per page (Default is 500) + page_token: Token from previous response required to retrieve the next page of results + """ + logger.info("Executing tool: search_organizations") + try: + params = {} + if term: + params["term"] = term + if with_interaction_dates is not None: + params["with_interaction_dates"] = with_interaction_dates + if with_interaction_persons is not None: + params["with_interaction_persons"] = with_interaction_persons + if with_opportunities is not None: + params["with_opportunities"] = with_opportunities + if page_size: + params["page_size"] = page_size + if page_token: + params["page_token"] = page_token + + return await make_http_request("GET", "/organizations", params=params) + except Exception as e: + logger.exception(f"Error executing tool search_organizations: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/affinity/tools/lists.py b/mcp_servers/affinity/tools/lists.py new file mode 100644 index 00000000..c0316494 --- /dev/null +++ b/mcp_servers/affinity/tools/lists.py @@ -0,0 +1,127 @@ +import logging +from typing import Any, Dict, Optional, List +from .base import make_v2_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_all_list_entries_on_a_list( + list_id: int, + cursor: Optional[str] = None, + limit: Optional[int] = None, + field_ids: Optional[List[str]] = None, + field_types: Optional[List[str]] = None +) -> Dict[str, Any]: + """Get all List Entries on a List. + + Args: + list_id: List ID + cursor: Cursor for pagination + limit: Number of items per page (1-100, default 100) + field_ids: Field IDs for field data + field_types: Field types (enriched, global, list, relationship-intelligence) + """ + logger.info(f"Executing tool: get_all_list_entries_on_a_list with list_id: {list_id}") + try: + params = {} + if cursor: + params["cursor"] = cursor + if limit: + params["limit"] = limit + if field_ids: + params["fieldIds"] = field_ids + if field_types: + params["fieldTypes"] = field_types + + return await make_v2_request("GET", f"/lists/{list_id}/list-entries", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_all_list_entries_on_a_list: {e}") + raise e + +async def get_metadata_on_all_lists( + cursor: Optional[str] = None, + limit: Optional[int] = None +) -> Dict[str, Any]: + """Get metadata on all Lists. + + Args: + cursor: Cursor for pagination + limit: Number of items per page (1-100, default 100) + """ + logger.info("Executing tool: get_metadata_on_all_lists") + try: + params = {} + if cursor: + params["cursor"] = cursor + if limit: + params["limit"] = limit + + return await make_v2_request("GET", "/lists", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_metadata_on_all_lists: {e}") + raise e + +async def get_metadata_on_a_single_list(list_id: int) -> Dict[str, Any]: + """Get metadata on a single List. + + Args: + list_id: List ID + """ + logger.info(f"Executing tool: get_metadata_on_a_single_list with list_id: {list_id}") + try: + return await make_v2_request("GET", f"/lists/{list_id}") + except Exception as e: + logger.exception(f"Error executing tool get_metadata_on_a_single_list: {e}") + raise e + +async def get_metadata_on_a_single_list_fields( + list_id: int, + cursor: Optional[str] = None, + limit: Optional[int] = None +) -> Dict[str, Any]: + """Get metadata on a single List's Fields. + + Args: + list_id: List ID + cursor: Cursor for pagination + limit: Number of items per page (1-100, default 100) + """ + logger.info(f"Executing tool: get_metadata_on_a_single_list_fields with list_id: {list_id}") + try: + params = {} + if cursor: + params["cursor"] = cursor + if limit: + params["limit"] = limit + + return await make_v2_request("GET", f"/lists/{list_id}/fields", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_metadata_on_a_single_list_fields: {e}") + raise e + +async def get_a_single_list_entry_on_a_list( + list_id: int, + list_entry_id: int, + field_ids: Optional[List[str]] = None, + field_types: Optional[List[str]] = None +) -> Dict[str, Any]: + """Get a single List Entry on a List. + + Args: + list_id: List ID + list_entry_id: List Entry ID + field_ids: Field IDs for field data + field_types: Field types (enriched, global, list, relationship-intelligence) + """ + logger.info(f"Executing tool: get_a_single_list_entry_on_a_list with list_id: {list_id}, list_entry_id: {list_entry_id}") + try: + params = {} + if field_ids: + params["fieldIds"] = field_ids + if field_types: + params["fieldTypes"] = field_types + + return await make_v2_request("GET", f"/lists/{list_id}/list-entries/{list_entry_id}", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_a_single_list_entry_on_a_list: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/affinity/tools/notes.py b/mcp_servers/affinity/tools/notes.py new file mode 100644 index 00000000..b9405947 --- /dev/null +++ b/mcp_servers/affinity/tools/notes.py @@ -0,0 +1,54 @@ +import logging +from typing import Any, Dict, Optional +from .base import AffinityV1Client + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_all_notes( + person_id: Optional[int] = None, + organization_id: Optional[int] = None, + opportunity_id: Optional[int] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None +) -> Dict[str, Any]: + """Get all Notes in Affinity. + + Args: + person_id: Filter by person ID + organization_id: Filter by organization ID + opportunity_id: Filter by opportunity ID + page_size: Number of items per page + page_token: Token for pagination + """ + logger.info("Executing tool: get_all_notes") + try: + params = {} + if person_id: + params["person_id"] = person_id + if organization_id: + params["organization_id"] = organization_id + if opportunity_id: + params["opportunity_id"] = opportunity_id + if page_size: + params["page_size"] = page_size + if page_token: + params["page_token"] = page_token + + return await AffinityV1Client.make_request("GET", "/notes", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_all_notes: {e}") + raise e + +async def get_specific_note(note_id: int) -> Dict[str, Any]: + """Get a specific note by ID. + + Args: + note_id: Note ID + """ + logger.info(f"Executing tool: get_specific_note with note_id: {note_id}") + try: + return await AffinityV1Client.make_request("GET", f"/notes/{note_id}") + except Exception as e: + logger.exception(f"Error executing tool get_specific_note: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/affinity/tools/opportunities.py b/mcp_servers/affinity/tools/opportunities.py new file mode 100644 index 00000000..6c9fde8c --- /dev/null +++ b/mcp_servers/affinity/tools/opportunities.py @@ -0,0 +1,84 @@ +import logging +from typing import Any, Dict, Optional, List +from .base import make_v2_request, make_http_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_all_opportunities( + cursor: Optional[str] = None, + limit: Optional[int] = None, + ids: Optional[List[int]] = None +) -> Dict[str, Any]: + """Get all Opportunities in Affinity. + + Returns basic information but NOT field data on each Opportunity. + To access field data on Opportunities, use the lists endpoints. + + Args: + cursor: Cursor for pagination + limit: Number of items per page (1-100, default 100) + ids: Opportunity IDs to filter by + """ + logger.info("Executing tool: get_all_opportunities") + try: + params = {} + if cursor: + params["cursor"] = cursor + if limit: + params["limit"] = limit + if ids: + params["ids"] = ids + + return await make_v2_request("GET", "/opportunities", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_all_opportunities: {e}") + raise e + +async def get_single_opportunity(opportunity_id: int) -> Dict[str, Any]: + """Get a single Opportunity by ID. + + Returns basic information but NOT field data on the Opportunity. + To access field data on Opportunities, use the lists endpoints. + + Args: + opportunity_id: Opportunity ID + """ + logger.info(f"Executing tool: get_single_opportunity with opportunity_id: {opportunity_id}") + try: + return await make_v2_request("GET", f"/opportunities/{opportunity_id}") + except Exception as e: + logger.exception(f"Error executing tool get_single_opportunity: {e}") + raise e + +async def search_opportunities( + term: Optional[str] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None +) -> Dict[str, Any]: + """Search for opportunities in Affinity. + + Searches your team's data and fetches all the opportunities that meet the search criteria. + The search term can be part of an opportunity name. + + Args: + term: A string used to search all the opportunities in your team's data + page_size: How many results to return per page (Default is 500) + page_token: Token from previous response required to retrieve the next page of results + """ + logger.info("Executing tool: search_opportunities") + try: + params = {} + if term: + params["term"] = term + if page_size: + params["page_size"] = page_size + if page_token: + params["page_token"] = page_token + + return await make_http_request("GET", "/opportunities", params=params) + except Exception as e: + logger.exception(f"Error executing tool search_opportunities: {e}") + raise e + + \ No newline at end of file diff --git a/mcp_servers/affinity/tools/persons.py b/mcp_servers/affinity/tools/persons.py new file mode 100644 index 00000000..11055579 --- /dev/null +++ b/mcp_servers/affinity/tools/persons.py @@ -0,0 +1,185 @@ +import logging +from typing import Any, Dict, Optional, List +from .base import make_v2_request, make_http_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_all_persons( + cursor: Optional[str] = None, + limit: Optional[int] = None, + ids: Optional[List[int]] = None, + field_ids: Optional[List[str]] = None, + field_types: Optional[List[str]] = None +) -> Dict[str, Any]: + """Get all Persons in Affinity. + + Args: + cursor: Cursor for pagination + limit: Number of items per page (1-100, default 100) + ids: Person IDs to filter by + field_ids: Field IDs for field data + field_types: Field types (enriched, global, relationship-intelligence) + """ + logger.info("Executing tool: get_all_persons") + try: + params = {} + if cursor: + params["cursor"] = cursor + if limit: + params["limit"] = limit + if ids: + params["ids"] = ids + if field_ids: + params["fieldIds"] = field_ids + if field_types: + params["fieldTypes"] = field_types + + return await make_v2_request("GET", "/persons", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_all_persons: {e}") + raise e + +async def get_single_person( + person_id: int, + field_ids: Optional[List[str]] = None, + field_types: Optional[List[str]] = None +) -> Dict[str, Any]: + """Get a single Person by ID. + + Args: + person_id: Person ID + field_ids: Field IDs for field data + field_types: Field types (enriched, global, relationship-intelligence) + """ + logger.info(f"Executing tool: get_single_person with person_id: {person_id}") + try: + params = {} + if field_ids: + params["fieldIds"] = field_ids + if field_types: + params["fieldTypes"] = field_types + + return await make_v2_request("GET", f"/persons/{person_id}", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_single_person: {e}") + raise e + +async def get_person_fields_metadata( + cursor: Optional[str] = None, + limit: Optional[int] = None +) -> Dict[str, Any]: + """Get metadata on Person Fields. + + Args: + cursor: Cursor for pagination + limit: Number of items per page (1-100, default 100) + """ + logger.info("Executing tool: get_person_fields_metadata") + try: + params = {} + if cursor: + params["cursor"] = cursor + if limit: + params["limit"] = limit + + return await make_v2_request("GET", "/persons/fields", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_person_fields_metadata: {e}") + raise e + +async def get_person_lists( + person_id: int, + cursor: Optional[str] = None, + limit: Optional[int] = None +) -> Dict[str, Any]: + """Get a Person's Lists. + + Args: + person_id: Person ID + cursor: Cursor for pagination + limit: Number of items per page (1-100, default 100) + """ + logger.info(f"Executing tool: get_person_lists with person_id: {person_id}") + try: + params = {} + if cursor: + params["cursor"] = cursor + if limit: + params["limit"] = limit + + return await make_v2_request("GET", f"/persons/{person_id}/lists", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_person_lists: {e}") + raise e + +async def get_person_list_entries( + person_id: int, + cursor: Optional[str] = None, + limit: Optional[int] = None +) -> Dict[str, Any]: + """Get a Person's List Entries. + + Args: + person_id: Person ID + cursor: Cursor for pagination + limit: Number of items per page (1-100, default 100) + """ + logger.info(f"Executing tool: get_person_list_entries with person_id: {person_id}") + try: + params = {} + if cursor: + params["cursor"] = cursor + if limit: + params["limit"] = limit + + return await make_v2_request("GET", f"/persons/{person_id}/list-entries", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_person_list_entries: {e}") + raise e + +async def search_persons( + term: Optional[str] = None, + with_interaction_dates: Optional[bool] = None, + with_interaction_persons: Optional[bool] = None, + with_opportunities: Optional[bool] = None, + with_current_organizations: Optional[bool] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None +) -> Dict[str, Any]: + """Search for persons in Affinity. + + Searches your team's data and fetches all the persons that meet the search criteria. + The search term can be part of an email address, a first name or a last name. + + Args: + term: A string used to search all the persons in your team's address book + with_interaction_dates: When true, interaction dates will be present on the returned resources + with_interaction_persons: When true, persons for each interaction will be returned + with_opportunities: When true, opportunity IDs will be returned for each person + with_current_organizations: When true, current organization IDs will be returned + page_size: How many results to return per page (Default is 500) + page_token: Token from previous response required to retrieve the next page of results + """ + logger.info("Executing tool: search_persons") + try: + params = {} + if term: + params["term"] = term + if with_interaction_dates is not None: + params["with_interaction_dates"] = with_interaction_dates + if with_interaction_persons is not None: + params["with_interaction_persons"] = with_interaction_persons + if with_opportunities is not None: + params["with_opportunities"] = with_opportunities + if with_current_organizations is not None: + params["with_current_organizations"] = with_current_organizations + if page_size: + params["page_size"] = page_size + if page_token: + params["page_token"] = page_token + + return await make_http_request("GET", "/persons", params=params) + except Exception as e: + logger.exception(f"Error executing tool search_persons: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/airtable/.env.example b/mcp_servers/airtable/.env.example new file mode 100644 index 00000000..d4934d63 --- /dev/null +++ b/mcp_servers/airtable/.env.example @@ -0,0 +1,2 @@ +AIRTABLE_PERSONAL_ACCESS_TOKEN=AIRTABLE_PERSONAL_ACCESS_TOKEN_HERE +AIRTABLE_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/airtable/Dockerfile b/mcp_servers/airtable/Dockerfile new file mode 100644 index 00000000..8cd7893a --- /dev/null +++ b/mcp_servers/airtable/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +COPY mcp_servers/airtable/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/airtable/server.py . +COPY mcp_servers/airtable/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/airtable/README.md b/mcp_servers/airtable/README.md new file mode 100644 index 00000000..abc57085 --- /dev/null +++ b/mcp_servers/airtable/README.md @@ -0,0 +1,78 @@ +# Airtable MCP Server + +A Model Context Protocol (MCP) server for Airtable integration. Manage bases, tables, and records using Airtable's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Airtable with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("AIRTABLE", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/airtable-mcp-server:latest + + +# Run Airtable MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/airtable-mcp-server:latest + + +# Run Airtable MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_airtable_api_key_here"}' \ + ghcr.io/klavis-ai/airtable-mcp-server:latest +``` + +**OAuth Setup:** Airtable requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Record Management**: Create, read, update, and delete Airtable records +- **Table Operations**: Manage table structure and schema +- **Base Management**: Access and manage Airtable bases +- **Field Operations**: Handle different field types and data validation +- **View Management**: Work with filtered views and sorting + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/airtable/requirements.txt b/mcp_servers/airtable/requirements.txt new file mode 100644 index 00000000..39966028 --- /dev/null +++ b/mcp_servers/airtable/requirements.txt @@ -0,0 +1,8 @@ +mcp==1.11.0 +pydantic +typing-extensions +aiohttp +click +python-dotenv +starlette +uvicorn[standard] \ No newline at end of file diff --git a/mcp_servers/airtable/server.py b/mcp_servers/airtable/server.py new file mode 100644 index 00000000..54e5b3d3 --- /dev/null +++ b/mcp_servers/airtable/server.py @@ -0,0 +1,788 @@ +import contextlib +import base64 +import json +import logging +import os +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar + +import click +import mcp.types as types +from dotenv import load_dotenv +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +from tools import ( + auth_token_context, + create_field, + create_records, + create_table, + delete_records, + get_bases_info, + get_record, + get_tables_info, + list_records, + update_field, + update_records, + update_table, +) + +load_dotenv() + +# Configure logging +logger = logging.getLogger(__name__) + +AIRTABLE_MCP_SERVER_PORT = int(os.getenv("AIRTABLE_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +@click.command() +@click.option( + "--port", default=AIRTABLE_MCP_SERVER_PORT, help="Port to listen on for HTTP" +) +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("airtable-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="airtable_list_bases_info", + description="Get information about all bases", + inputSchema={ + "type": "object", + "properties": {}, + "required": [], + }, + annotations=types.ToolAnnotations( + **{"category": "AIRTABLE_BASE", "readOnlyHint": True} + ), + ), + types.Tool( + name="airtable_list_tables_info", + description="Get information about all tables in a base", + inputSchema={ + "type": "object", + "properties": { + "base_id": { + "type": "string", + "description": "ID of the base to get tables from", + }, + }, + "required": ["base_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "AIRTABLE_TABLE", "readOnlyHint": True} + ), + ), + types.Tool( + name="airtable_create_table", + description="Create a new table in a base", + inputSchema={ + "type": "object", + "properties": { + "base_id": { + "type": "string", + "description": "ID of the base to create the table in", + }, + "name": { + "type": "string", + "description": "Name of the new table", + }, + "description": { + "type": "string", + "description": "Optional description of the table", + }, + "fields": { + "type": "array", + "items": {"type": "object"}, + "description": "Array of field objects to create in the table", + }, + }, + "required": ["base_id", "name", "fields"], + }, + annotations=types.ToolAnnotations( + **{"category": "AIRTABLE_TABLE"} + ), + ), + types.Tool( + name="airtable_update_table", + description="Update an existing table in a base", + inputSchema={ + "type": "object", + "properties": { + "base_id": { + "type": "string", + "description": "ID of the base containing the table", + }, + "table_id": { + "type": "string", + "description": "ID of the table to update", + }, + "name": { + "type": "string", + "description": "Optional new name for the table", + }, + "description": { + "type": "string", + "description": "Optional new description for the table", + }, + }, + "required": ["base_id", "table_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "AIRTABLE_TABLE"} + ), + ), + types.Tool( + name="airtable_create_field", + description="Create a new field in a table", + inputSchema={ + "type": "object", + "properties": { + "base_id": { + "type": "string", + "description": "ID of the base containing the table", + }, + "table_id": { + "type": "string", + "description": "ID of the table to create the field in", + }, + "name": { + "type": "string", + "description": "Name of the new field", + }, + "type": { + "type": "string", + "description": "Type of the field (e.g., 'singleLineText', 'number', 'singleSelect', etc.)", + }, + "description": { + "type": "string", + "description": "Optional description of the field", + }, + "options": { + "type": "object", + "description": "Optional field configuration options specific to the field type", + }, + }, + "required": ["base_id", "table_id", "name", "type"], + }, + annotations=types.ToolAnnotations( + **{"category": "AIRTABLE_FIELD"} + ), + ), + types.Tool( + name="airtable_update_field", + description="Update an existing field in a table", + inputSchema={ + "type": "object", + "properties": { + "base_id": { + "type": "string", + "description": "ID of the base containing the table", + }, + "table_id": { + "type": "string", + "description": "ID of the table containing the field", + }, + "field_id": { + "type": "string", + "description": "ID of the field to update", + }, + "name": { + "type": "string", + "description": "Optional new name for the field", + }, + "description": { + "type": "string", + "description": "Optional new description for the field", + }, + }, + "required": ["base_id", "table_id", "field_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "AIRTABLE_FIELD"} + ), + ), + types.Tool( + name="airtable_list_records", + description="Get all records from a table with optional filtering and formatting", + inputSchema={ + "type": "object", + "properties": { + "base_id": { + "type": "string", + "description": "ID of the base containing the table", + }, + "table_id": { + "type": "string", + "description": "ID or name of the table to get records from", + }, + "fields": { + "type": "array", + "items": {"type": "string"}, + "description": "List of field names to include in results (only these fields will be returned)", + }, + "filter_by_formula": { + "type": "string", + "description": "Formula to filter records (e.g., \"{Status} = 'Active'\")", + }, + "max_records": { + "type": "integer", + "description": "Maximum number of records to return (default: all records, max: 100)", + }, + "page_size": { + "type": "integer", + "description": "Number of records to return per page (1-100, default: 100)", + }, + "sort": { + "type": "array", + "items": { + "type": "object", + "properties": { + "field": {"type": "string"}, + "direction": { + "type": "string", + "enum": ["asc", "desc"], + }, + }, + }, + "description": "List of sort objects with 'field' and 'direction' keys", + }, + "return_fields_by_field_id": { + "type": "boolean", + "description": "Return fields keyed by field ID instead of name", + }, + }, + "required": ["base_id", "table_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "AIRTABLE_RECORD", "readOnlyHint": True} + ), + ), + types.Tool( + name="airtable_get_record", + description="Get a single record from a table", + inputSchema={ + "type": "object", + "properties": { + "base_id": { + "type": "string", + "description": "ID of the base containing the table", + }, + "table_id": { + "type": "string", + "description": "ID or name of the table containing the record", + }, + "record_id": { + "type": "string", + "description": "ID of the record to retrieve", + }, + }, + "required": ["base_id", "table_id", "record_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "AIRTABLE_RECORD", "readOnlyHint": True} + ), + ), + types.Tool( + name="airtable_create_records", + description="Create multiple records in a table", + inputSchema={ + "type": "object", + "properties": { + "base_id": { + "type": "string", + "description": "ID of the base containing the table", + }, + "table_id": { + "type": "string", + "description": "ID of the table to create records in", + }, + "records": { + "type": "array", + "items": {"type": "object"}, + "description": "Array of record objects to create", + }, + "typecast": { + "type": "boolean", + "description": "Whether to automatically convert string values to appropriate types", + }, + "return_fields_by_field_id": { + "type": "boolean", + "description": "Whether to return fields keyed by field ID instead of name", + }, + }, + "required": ["base_id", "table_id", "records"], + }, + annotations=types.ToolAnnotations( + **{"category": "AIRTABLE_RECORD"} + ), + ), + types.Tool( + name="airtable_update_records", + description="Update multiple records in a table with optional upsert functionality", + inputSchema={ + "type": "object", + "properties": { + "base_id": { + "type": "string", + "description": "ID of the base containing the table", + }, + "table_id": { + "type": "string", + "description": "ID of the table containing the records", + }, + "records": { + "type": "array", + "items": {"type": "object"}, + "description": "Array of record objects. For regular updates: include 'id' and 'fields'. For upserts: include only 'fields'", + }, + "typecast": { + "type": "boolean", + "description": "Whether to automatically convert string values to appropriate types", + }, + "return_fields_by_field_id": { + "type": "boolean", + "description": "Whether to return fields keyed by field ID instead of name", + }, + "perform_upsert": { + "type": "object", + "description": "Upsert configuration with fieldsToMergeOn array for matching existing records", + "properties": { + "fieldsToMergeOn": { + "type": "array", + "items": {"type": "string"}, + "description": "Array of field names to use for matching existing records", + } + }, + }, + }, + "required": ["base_id", "table_id", "records"], + }, + annotations=types.ToolAnnotations( + **{"category": "AIRTABLE_RECORD"} + ), + ), + types.Tool( + name="airtable_delete_records", + description="Delete multiple records from a table", + inputSchema={ + "type": "object", + "properties": { + "base_id": { + "type": "string", + "description": "ID of the base containing the table", + }, + "table_id": { + "type": "string", + "description": "ID of the table containing the records", + }, + "record_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "Array of record IDs to delete", + }, + }, + "required": ["base_id", "table_id", "record_ids"], + }, + annotations=types.ToolAnnotations( + **{"category": "AIRTABLE_RECORD"} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + if name == "airtable_list_bases_info": + try: + result = await get_bases_info() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "airtable_list_tables_info": + try: + result = await get_tables_info(base_id=arguments.get("base_id")) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "airtable_create_table": + try: + result = await create_table( + base_id=arguments.get("base_id"), + name=arguments.get("name"), + description=arguments.get("description"), + fields=arguments.get("fields"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "airtable_update_table": + try: + result = await update_table( + base_id=arguments.get("base_id"), + table_id=arguments.get("table_id"), + name=arguments.get("name"), + description=arguments.get("description"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "airtable_create_field": + try: + result = await create_field( + base_id=arguments.get("base_id"), + table_id=arguments.get("table_id"), + name=arguments.get("name"), + type=arguments.get("type"), + description=arguments.get("description"), + options=arguments.get("options"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "airtable_update_field": + try: + result = await update_field( + base_id=arguments.get("base_id"), + table_id=arguments.get("table_id"), + field_id=arguments.get("field_id"), + name=arguments.get("name"), + description=arguments.get("description"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "airtable_list_records": + try: + result = await list_records( + base_id=arguments.get("base_id"), + table_id=arguments.get("table_id"), + fields=arguments.get("fields"), + filter_by_formula=arguments.get("filter_by_formula"), + max_records=arguments.get("max_records"), + page_size=arguments.get("page_size"), + sort=arguments.get("sort"), + return_fields_by_field_id=arguments.get( + "return_fields_by_field_id" + ), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "airtable_get_record": + try: + result = await get_record( + base_id=arguments.get("base_id"), + table_id=arguments.get("table_id"), + record_id=arguments.get("record_id"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "airtable_create_records": + try: + result = await create_records( + base_id=arguments.get("base_id"), + table_id=arguments.get("table_id"), + records=arguments.get("records"), + typecast=arguments.get("typecast"), + return_fields_by_field_id=arguments.get( + "return_fields_by_field_id" + ), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "airtable_update_records": + try: + result = await update_records( + base_id=arguments.get("base_id"), + table_id=arguments.get("table_id"), + records=arguments.get("records"), + typecast=arguments.get("typecast"), + return_fields_by_field_id=arguments.get( + "return_fields_by_field_id" + ), + perform_upsert=arguments.get("perform_upsert"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "airtable_delete_records": + try: + result = await delete_records( + base_id=arguments.get("base_id"), + table_id=arguments.get("table_id"), + record_ids=arguments.get("record_ids"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() diff --git a/mcp_servers/airtable/tools/__init__.py b/mcp_servers/airtable/tools/__init__.py new file mode 100644 index 00000000..32529c8a --- /dev/null +++ b/mcp_servers/airtable/tools/__init__.py @@ -0,0 +1,34 @@ +# Airtable MCP Server Tools +# This package contains all the tool implementations organized by object type + +from .bases import get_bases_info +from .fields import create_field, update_field +from .records import ( + create_records, + delete_records, + get_record, + list_records, + update_records, +) +from .tables import create_table, get_tables_info, update_table +from .base import auth_token_context + +__all__ = [ + # Bases + "get_bases_info", + # Tables + "get_tables_info", + "create_table", + "update_table", + # Fields + "create_field", + "update_field", + # Records + "list_records", + "get_record", + "create_records", + "update_records", + "delete_records", + # Base + "auth_token_context", +] diff --git a/mcp_servers/airtable/tools/base.py b/mcp_servers/airtable/tools/base.py new file mode 100644 index 00000000..b2cc308c --- /dev/null +++ b/mcp_servers/airtable/tools/base.py @@ -0,0 +1,120 @@ +import logging +import os +from typing import Optional +from contextvars import ContextVar + +import aiohttp +from dotenv import load_dotenv + +# Configure logging +logger = logging.getLogger(__name__) + + +class AirtableValidationError(Exception): + """Custom exception for Airtable 422 validation errors.""" + + pass + + +load_dotenv() + +AIRTABLE_API_BASE = "https://api.airtable.com/v0" + +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + token = auth_token_context.get() + if not token: + # Fallback to environment variable if no token in context + token = os.getenv("AIRTABLE_PERSONAL_ACCESS_TOKEN") + if not token: + raise RuntimeError("No authentication token available") + return token + except LookupError: + token = os.getenv("AIRTABLE_PERSONAL_ACCESS_TOKEN") + if not token: + raise RuntimeError("Authentication token not found in request context or environment") + return token + +def _get_airtable_headers() -> dict: + """Get the standard headers for Airtable API requests.""" + auth_token = get_auth_token() + return { + "Authorization": f"Bearer {auth_token}", + "Content-Type": "application/json", + } + + +async def make_airtable_request( + method: str, + endpoint: str, + json_data: Optional[dict] = None, + expect_empty_response: bool = False, +) -> dict | None: + """Make a request to the Airtable API.""" + url = f"{AIRTABLE_API_BASE}/{endpoint}" + headers = _get_airtable_headers() + async with aiohttp.ClientSession(headers=headers) as session: + try: + async with session.request(method, url, json=json_data) as response: + # Handle 422 validation errors specially + if response.status == 422: + error_text = await response.text() + logger.error(f"Airtable API 422 Error: {error_text}") + raise AirtableValidationError( + "Invalid Request body: You may have missed a required field or provided an invalid field. Please check the Airtable Field model for the correct field types and options." + ) + response.raise_for_status() # Raise exception for non-2xx status codes + if expect_empty_response: + # For requests like DELETE or PUT roles/reactions where success is 204 No Content + if response.status == 204: + return None + else: + # If we expected empty but got something else (and it wasn't an error raised above) + logger.warning( + f"Expected empty response for {method} {endpoint}, but got status {response.status}" + ) + # Try to parse JSON anyway, might be useful error info + try: + return await response.json() + except aiohttp.ContentTypeError: + return await response.text() # Return text if not json + else: + # Check if response is JSON before parsing + if "application/json" in response.headers.get("Content-Type", ""): + return await response.json() + else: + # Handle non-JSON responses if necessary, e.g., log or return text + text_content = await response.text() + logger.warning( + f"Received non-JSON response for {method} {endpoint}: {text_content[:100]}..." + ) + return {"raw_content": text_content} + except AirtableValidationError as e: + # Re-raise 422 validation errors with their specific message + raise e + except aiohttp.ClientResponseError as e: + logger.error( + f"Airtable API request failed: {e.status} {e.message} for {method} {url}" + ) + error_details = e.message + try: + # Airtable often returns JSON errors + error_body = await e.response.json() + error_details = f"{e.message} - {error_body}" + except Exception: + # If response body isn't JSON or can't be read + pass + raise RuntimeError( + f"Airtable API Error ({e.status}): {error_details}" + ) from e + except Exception as e: + logger.error( + f"An unexpected error occurred during Airtable API request: {e}" + ) + raise RuntimeError( + f"Unexpected error during API call to {method} {url}" + ) from e diff --git a/mcp_servers/airtable/tools/bases.py b/mcp_servers/airtable/tools/bases.py new file mode 100644 index 00000000..6dd0784e --- /dev/null +++ b/mcp_servers/airtable/tools/bases.py @@ -0,0 +1,14 @@ +import logging +from typing import Any, Dict + +from .base import make_airtable_request + +# Configure logging +logger = logging.getLogger("airtable_tools") + + +async def get_bases_info() -> Dict[str, Any]: + """Get information about all bases.""" + endpoint = "meta/bases" + logger.info("Executing tool: get_bases_info") + return await make_airtable_request("GET", endpoint) diff --git a/mcp_servers/airtable/tools/fields.py b/mcp_servers/airtable/tools/fields.py new file mode 100644 index 00000000..7c43a098 --- /dev/null +++ b/mcp_servers/airtable/tools/fields.py @@ -0,0 +1,59 @@ +import logging +from typing import Any, Dict + +from .base import make_airtable_request + +# Configure logging +logger = logging.getLogger("airtable_tools") + + +async def create_field( + base_id: str, + table_id: str, + name: str, + type: str, + description: str | None = None, + options: Dict[str, Any] | None = None, +) -> Dict[str, Any]: + """Create a new field in a table.""" + endpoint = f"meta/bases/{base_id}/tables/{table_id}/fields" + + payload = { + "name": name, + "type": type, + } + + if description: + payload["description"] = description + + if options: + payload["options"] = options + + logger.info( + f"Executing tool: create_field '{name}' of type '{type}' in table {table_id}, base {base_id}" + ) + return await make_airtable_request("POST", endpoint, json_data=payload) + + +async def update_field( + base_id: str, + table_id: str, + field_id: str, + name: str | None = None, + description: str | None = None, +) -> Dict[str, Any]: + """Update an existing field in a table.""" + endpoint = f"meta/bases/{base_id}/tables/{table_id}/fields/{field_id}" + + payload = {} + + if name is not None: + payload["name"] = name + + if description is not None: + payload["description"] = description + + logger.info( + f"Executing tool: update_field '{field_id}' in table {table_id}, base {base_id}" + ) + return await make_airtable_request("PATCH", endpoint, json_data=payload) diff --git a/mcp_servers/airtable/tools/records.py b/mcp_servers/airtable/tools/records.py new file mode 100644 index 00000000..6896dff4 --- /dev/null +++ b/mcp_servers/airtable/tools/records.py @@ -0,0 +1,147 @@ +import logging +from typing import Any, Dict + +from .base import make_airtable_request + +# Configure logging +logger = logging.getLogger("airtable_tools") + + +async def list_records( + base_id: str, + table_id: str, + fields: list[str] | None = None, + filter_by_formula: str | None = None, + max_records: int | None = None, + page_size: int | None = None, + sort: list[Dict[str, str]] | None = None, + return_fields_by_field_id: bool | None = None, +) -> Dict[str, Any]: + """Get all records from a table with optional filtering and formatting.""" + endpoint = f"{base_id}/{table_id}" + + # Build query parameters + params = {} + + if fields: + for field in fields: + params["fields[]"] = field + + if filter_by_formula: + params["filterByFormula"] = filter_by_formula + + if max_records is not None: + params["maxRecords"] = max_records + + if page_size is not None: + params["pageSize"] = page_size + + if sort: + for i, sort_item in enumerate(sort): + if "field" in sort_item: + params[f"sort[{i}][field]"] = sort_item["field"] + if "direction" in sort_item: + params[f"sort[{i}][direction]"] = sort_item["direction"] + + if return_fields_by_field_id is not None: + params["returnFieldsByFieldId"] = str(return_fields_by_field_id).lower() + + # Add query parameters to endpoint if any exist + if params: + # Convert params to query string + query_parts = [] + for key, value in params.items(): + if key == "fields[]": + # Handle multiple fields specially + continue + query_parts.append(f"{key}={value}") + + # Handle fields separately to allow multiple values + if fields: + for field in fields: + query_parts.append(f"fields[]={field}") + + if query_parts: + endpoint = f"{endpoint}?{'&'.join(query_parts)}" + + logger.info(f"Executing tool: list_records for table {table_id} in base {base_id}") + return await make_airtable_request("GET", endpoint) + + +async def get_record(base_id: str, table_id: str, record_id: str) -> Dict[str, Any]: + """Get a single record from a table.""" + endpoint = f"{base_id}/{table_id}/{record_id}" + logger.info( + f"Executing tool: get_record for record {record_id} in table {table_id}, base {base_id}" + ) + return await make_airtable_request("GET", endpoint) + + +async def create_records( + base_id: str, + table_id: str, + records: list[Dict[str, Any]], + typecast: bool | None = None, + return_fields_by_field_id: bool | None = None, +) -> Dict[str, Any]: + """Create one or multiple records in a table.""" + endpoint = f"{base_id}/{table_id}" + + payload = { + "typecast": typecast, + "returnFieldsByFieldId": return_fields_by_field_id, + "records": records, + } + + logger.info( + f"Executing tool: create_records for table {table_id} in base {base_id}" + ) + return await make_airtable_request("POST", endpoint, json_data=payload) + + +async def update_records( + base_id: str, + table_id: str, + records: list[Dict[str, Any]], + typecast: bool | None = None, + return_fields_by_field_id: bool | None = None, + perform_upsert: Dict[str, Any] | None = None, +) -> Dict[str, Any]: + """Update one or multiple records in a table, with optional upsert functionality.""" + endpoint = f"{base_id}/{table_id}" + + payload = { + "records": records, + } + + if typecast is not None: + payload["typecast"] = typecast + + if return_fields_by_field_id is not None: + payload["returnFieldsByFieldId"] = return_fields_by_field_id + + if perform_upsert is not None: + payload["performUpsert"] = perform_upsert + + logger.info( + f"Executing tool: update_records for table {table_id} in base {base_id}" + ) + return await make_airtable_request("PATCH", endpoint, json_data=payload) + + +async def delete_records( + base_id: str, + table_id: str, + record_ids: list[str], +) -> Dict[str, Any]: + """Delete multiple records from a table.""" + endpoint = f"{base_id}/{table_id}" + + # Build query params string with multiple record IDs + records_params = "&".join([f"records[]={record_id}" for record_id in record_ids]) + endpoint = f"{endpoint}?{records_params}" + + logger.info( + f"Executing tool: delete_records for table {table_id} in base {base_id}" + ) + return await make_airtable_request("DELETE", endpoint) diff --git a/mcp_servers/airtable/tools/tables.py b/mcp_servers/airtable/tools/tables.py new file mode 100644 index 00000000..1bd5987d --- /dev/null +++ b/mcp_servers/airtable/tools/tables.py @@ -0,0 +1,53 @@ +import logging +from typing import Any, Dict + +from .base import make_airtable_request + +# Configure logging +logger = logging.getLogger("airtable_tools") + + +async def get_tables_info(base_id: str) -> Dict[str, Any]: + """Get information about all tables in a base.""" + endpoint = f"meta/bases/{base_id}/tables" + logger.info(f"Executing tool: get_tables_info for base_id: {base_id}") + return await make_airtable_request("GET", endpoint) + + +async def create_table( + base_id: str, + name: str, + fields: list[dict], + description: str | None = None, +) -> Dict[str, Any]: + """Create a new table in a base.""" + endpoint = f"meta/bases/{base_id}/tables" + + payload = { + "name": name, + "fields": fields, + } + if description: + payload["description"] = description + + logger.info(f"Executing tool: create_table for base_id: {base_id}") + return await make_airtable_request("POST", endpoint, json_data=payload) + + +async def update_table( + base_id: str, + table_id: str, + name: str | None = None, + description: str | None = None, +) -> Dict[str, Any]: + """Update an existing table in a base.""" + endpoint = f"meta/bases/{base_id}/tables/{table_id}" + + payload = {} + if name: + payload["name"] = name + if description: + payload["description"] = description + + logger.info(f"Executing tool: update_table for table {table_id} in base {base_id}") + return await make_airtable_request("PATCH", endpoint, json_data=payload) diff --git a/mcp_servers/asana/.env.example b/mcp_servers/asana/.env.example new file mode 100644 index 00000000..197a12b8 --- /dev/null +++ b/mcp_servers/asana/.env.example @@ -0,0 +1,2 @@ +# Port for the MCP server to listen on +ASANA_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/asana/Dockerfile b/mcp_servers/asana/Dockerfile new file mode 100644 index 00000000..21274c2c --- /dev/null +++ b/mcp_servers/asana/Dockerfile @@ -0,0 +1,22 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/asana/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the server code +COPY mcp_servers/asana/server.py . +COPY mcp_servers/asana/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/asana/README.md b/mcp_servers/asana/README.md new file mode 100644 index 00000000..845534fa --- /dev/null +++ b/mcp_servers/asana/README.md @@ -0,0 +1,78 @@ +# Asana MCP Server + +A Model Context Protocol (MCP) server for Asana integration. Manage tasks, projects, and team workflows using Asana's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Asana with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("ASANA", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/asana-mcp-server:latest + + +# Run Asana MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/asana-mcp-server:latest + + +# Run Asana MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_asana_api_key_here"}' \ + ghcr.io/klavis-ai/asana-mcp-server:latest +``` + +**OAuth Setup:** Asana requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Task Management**: Create, read, update, and complete tasks +- **Project Operations**: Manage projects and project timelines +- **Team Collaboration**: Handle team assignments and permissions +- **Custom Fields**: Work with custom task and project fields +- **Status Updates**: Track progress and project status + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/asana/__init__.py b/mcp_servers/asana/__init__.py new file mode 100644 index 00000000..f5fdf9d1 --- /dev/null +++ b/mcp_servers/asana/__init__.py @@ -0,0 +1 @@ +# Asana MCP Server Package \ No newline at end of file diff --git a/mcp_servers/asana/requirements.txt b/mcp_servers/asana/requirements.txt new file mode 100644 index 00000000..a5999950 --- /dev/null +++ b/mcp_servers/asana/requirements.txt @@ -0,0 +1,10 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +httpx +click +starlette +asyncio \ No newline at end of file diff --git a/mcp_servers/asana/server.py b/mcp_servers/asana/server.py new file mode 100644 index 00000000..c70f0865 --- /dev/null +++ b/mcp_servers/asana/server.py @@ -0,0 +1,910 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools.base import AsanaToolExecutionError +from tools.constants import ( + TaskSortBy, + SortOrder, + TagColor, +) + +# Import tools +from tools import tasks as task_tools +from tools import projects as project_tools +from tools import workspaces as workspace_tools +from tools import users as user_tools +from tools import teams as team_tools +from tools import tags as tag_tools +from tools.base import auth_token_context + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +ASANA_MCP_SERVER_PORT = int(os.getenv("ASANA_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +@click.command() +@click.option("--port", default=ASANA_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + app = Server("asana-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="asana_create_task", + description="Create a new task in Asana. You MUST call asana_get_workspaces first to get the workspace_id for the task.", + inputSchema={ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the task", + }, + "start_date": { + "type": "string", + "description": "The start date of the task in YYYY-MM-DD format", + }, + "due_date": { + "type": "string", + "description": "The due date of the task in YYYY-MM-DD format", + }, + "description": { + "type": "string", + "description": "The description of the task", + }, + "parent_task_id": { + "type": "string", + "description": "The ID of the parent task", + }, + "workspace_id": { + "type": "string", + "description": "The ID of the workspace", + }, + "project": { + "type": "string", + "description": "The ID or name of the project", + }, + "assignee_id": { + "type": "string", + "description": "The ID of the assignee (defaults to 'me')", + }, + "tags": { + "type": "array", + "items": {"type": "string"}, + "description": "List of tag names or IDs", + }, + }, + "required": ["name", "workspace_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TASK"} + ), + ), + types.Tool( + name="asana_get_task", + description="Get a task by its ID from Asana", + inputSchema={ + "type": "object", + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to get", + }, + "max_subtasks": { + "type": "integer", + "description": "Maximum number of subtasks to return (0-100, default 100)", + "minimum": 0, + "maximum": 100, + }, + }, + "required": ["task_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TASK", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_search_tasks", + description="Search for tasks in Asana. You MUST call asana_get_workspaces first to get the workspace_id for the task.", + inputSchema={ + "type": "object", + "properties": { + "keywords": { + "type": "string", + "description": "Keywords to search for in task names and descriptions", + }, + "workspace_id": { + "type": "string", + "description": "The workspace ID to search in", + }, + "assignee_id": { + "type": "string", + "description": "Filter by assignee ID", + }, + "project": { + "type": "string", + "description": "Project ID or name to filter by", + }, + "team_id": { + "type": "string", + "description": "Team ID to filter by", + }, + "tags": { + "type": "array", + "items": {"type": "string"}, + "description": "Tag names or IDs to filter by", + }, + "due_on": { + "type": "string", + "description": "Filter tasks due on this date (YYYY-MM-DD)", + }, + "due_on_or_after": { + "type": "string", + "description": "Filter tasks due on or after this date (YYYY-MM-DD)", + }, + "due_on_or_before": { + "type": "string", + "description": "Filter tasks due on or before this date (YYYY-MM-DD)", + }, + "start_on": { + "type": "string", + "description": "Filter tasks starting on this date (YYYY-MM-DD)", + }, + "start_on_or_after": { + "type": "string", + "description": "Filter tasks starting on or after this date (YYYY-MM-DD)", + }, + "start_on_or_before": { + "type": "string", + "description": "Filter tasks starting on or before this date (YYYY-MM-DD)", + }, + "completed": { + "type": "boolean", + "description": "Filter by completion status", + }, + "limit": { + "type": "integer", + "description": "Maximum number of tasks to return (1-100, default 100)", + "minimum": 1, + "maximum": 100, + }, + "sort_by": { + "type": "string", + "enum": ["created_at", "modified_at", "due_date"], + "description": "Sort tasks by this field (default: modified_at)", + }, + "sort_order": { + "type": "string", + "enum": ["ascending", "descending"], + "description": "Sort order (default: descending)", + }, + }, + "required": ["workspace_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TASK", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_update_task", + description="Update a task in Asana", + inputSchema={ + "type": "object", + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to update", + }, + "name": { + "type": "string", + "description": "New name for the task", + }, + "completed": { + "type": "boolean", + "description": "Mark task as completed (true) or incomplete (false)", + }, + "start_date": { + "type": "string", + "description": "New start date in YYYY-MM-DD format", + }, + "due_date": { + "type": "string", + "description": "New due date in YYYY-MM-DD format", + }, + "description": { + "type": "string", + "description": "New description for the task", + }, + "assignee_id": { + "type": "string", + "description": "New assignee ID", + }, + }, + "required": ["task_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TASK"} + ), + ), + types.Tool( + name="asana_mark_task_completed", + description="Mark a task as completed in Asana", + inputSchema={ + "type": "object", + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to mark as completed", + }, + }, + "required": ["task_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TASK"} + ), + ), + types.Tool( + name="asana_get_subtasks", + description="Get subtasks from a task in Asana", + inputSchema={ + "type": "object", + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to get subtasks from", + }, + "limit": { + "type": "integer", + "description": "Maximum number of subtasks to return (1-100, default 100)", + "minimum": 1, + "maximum": 100, + }, + "next_page_token": { + "type": "string", + "description": "Token for pagination", + }, + }, + "required": ["task_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TASK", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_attach_file_to_task", + description="Attach a file to a task in Asana", + inputSchema={ + "type": "object", + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to attach the file to", + }, + "file_name": { + "type": "string", + "description": "The name of the file with extension", + }, + "file_content_str": { + "type": "string", + "description": "String content of the file (for text files)", + }, + "file_content_base64": { + "type": "string", + "description": "Base64-encoded binary content (for binary files)", + }, + "file_content_url": { + "type": "string", + "description": "URL of the file to attach", + }, + "file_encoding": { + "type": "string", + "description": "File encoding (default: utf-8)", + }, + }, + "required": ["task_id", "file_name"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TASK"} + ), + ), + types.Tool( + name="asana_get_projects", + description="Get projects from Asana with optional filtering by timestamps. You MUST call asana_get_workspaces first to get the workspace_id for the project.", + inputSchema={ + "type": "object", + "properties": { + "workspace_id": { + "type": "string", + "description": "The workspace ID to get projects from", + }, + "team_id": { + "type": "string", + "description": "The team ID to get projects from", + }, + "limit": { + "type": "integer", + "description": "Maximum number of projects to return (1-100, default 100)", + "minimum": 1, + "maximum": 100, + }, + "next_page_token": { + "type": "string", + "description": "Token for pagination", + }, + "filter": { + "type": "object", + "description": "Optional filters for projects", + "properties": { + "created_at": { + "type": "object", + "description": "Filter by creation date", + "properties": { + "gt": { + "type": "string", + "description": "Greater than timestamp (ISO 8601 format)", + }, + "gte": { + "type": "string", + "description": "Greater than or equal timestamp (ISO 8601 format)", + }, + "lt": { + "type": "string", + "description": "Less than timestamp (ISO 8601 format)", + }, + "lte": { + "type": "string", + "description": "Less than or equal timestamp (ISO 8601 format)", + }, + }, + }, + "modified_at": { + "type": "object", + "description": "Filter by modification date", + "properties": { + "gt": { + "type": "string", + "description": "Greater than timestamp (ISO 8601 format)", + }, + "gte": { + "type": "string", + "description": "Greater than or equal timestamp (ISO 8601 format)", + }, + "lt": { + "type": "string", + "description": "Less than timestamp (ISO 8601 format)", + }, + "lte": { + "type": "string", + "description": "Less than or equal timestamp (ISO 8601 format)", + }, + }, + }, + }, + }, + }, + "required": ["workspace_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_PROJECT", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_get_project", + description="Get a project by its ID from Asana", + inputSchema={ + "type": "object", + "properties": { + "project_id": { + "type": "string", + "description": "The ID of the project to get", + }, + }, + "required": ["project_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_PROJECT", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_get_workspaces", + description="Get user's workspaces from Asana", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of workspaces to return (1-100, default 100)", + "minimum": 1, + "maximum": 100, + }, + "next_page_token": { + "type": "string", + "description": "Token for pagination", + }, + }, + "required": [], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_WORKSPACE", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_get_workspace", + description="Get a workspace by its ID from Asana", + inputSchema={ + "type": "object", + "properties": { + "workspace_id": { + "type": "string", + "description": "The ID of the workspace to get", + }, + }, + "required": ["workspace_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_WORKSPACE", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_get_users", + description="Get users from Asana. You MUST call asana_get_workspaces first to get the workspace_id for the user.", + inputSchema={ + "type": "object", + "properties": { + "workspace_id": { + "type": "string", + "description": "The workspace ID to get users from", + }, + "limit": { + "type": "integer", + "description": "Maximum number of users to return (1-100, default 100)", + "minimum": 1, + "maximum": 100, + }, + "next_page_token": { + "type": "string", + "description": "Token for pagination", + }, + }, + "required": ["workspace_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_USER", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_get_user", + description="Get a user by their ID from Asana", + inputSchema={ + "type": "object", + "properties": { + "user_id": { + "type": "string", + "description": "The ID of the user to get", + }, + }, + "required": ["user_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_USER", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_get_teams", + description="Get teams from Asana. You MUST call asana_get_workspaces first to get the workspace_id for the team.", + inputSchema={ + "type": "object", + "properties": { + "workspace_id": { + "type": "string", + "description": "The workspace ID to get teams from", + }, + "limit": { + "type": "integer", + "description": "Maximum number of teams to return (1-100, default 100)", + "minimum": 1, + "maximum": 100, + }, + "next_page_token": { + "type": "string", + "description": "Token for pagination", + }, + }, + "required": ["workspace_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TEAM", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_get_team", + description="Get a team by its ID from Asana", + inputSchema={ + "type": "object", + "properties": { + "team_id": { + "type": "string", + "description": "The ID of the team to get", + }, + }, + "required": ["team_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TEAM", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_get_user_teams", + description="Get teams that the current user is a member of in Asana", + inputSchema={ + "type": "object", + "properties": { + "workspace_id": { + "type": "string", + "description": "The workspace ID to get teams from", + }, + "limit": { + "type": "integer", + "description": "Maximum number of teams to return (1-100, default 100)", + "minimum": 1, + "maximum": 100, + }, + "next_page_token": { + "type": "string", + "description": "Token for pagination", + }, + }, + "required": [], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TEAM", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_get_tags", + description="Get tags from Asana. You MUST call asana_get_workspaces first to get the workspace_id for the tag.", + inputSchema={ + "type": "object", + "properties": { + "workspace_id": { + "type": "string", + "description": "The workspace ID to get tags from", + }, + "limit": { + "type": "integer", + "description": "Maximum number of tags to return (1-100, default 100)", + "minimum": 1, + "maximum": 100, + }, + "next_page_token": { + "type": "string", + "description": "Token for pagination", + }, + }, + "required": ["workspace_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TAG", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_get_tag", + description="Get a tag by its ID from Asana", + inputSchema={ + "type": "object", + "properties": { + "tag_id": { + "type": "string", + "description": "The ID of the tag to get", + }, + }, + "required": ["tag_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TAG", "readOnlyHint": True} + ), + ), + types.Tool( + name="asana_create_tag", + description="Create a tag in Asana. You MUST call asana_get_workspaces first to get the workspace_id for the tag.", + inputSchema={ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the tag", + }, + "description": { + "type": "string", + "description": "The description of the tag", + }, + "color": { + "type": "string", + "enum": ["dark-pink", "dark-green", "dark-blue", "dark-red", "dark-teal", "dark-brown", "dark-orange", "dark-purple", "dark-warm-gray", "light-pink", "light-green", "light-blue", "light-red", "light-teal", "light-brown", "light-orange", "light-purple", "light-warm-gray"], + "description": "The color of the tag", + }, + "workspace_id": { + "type": "string", + "description": "The workspace ID to create the tag in", + }, + }, + "required": ["name", "workspace_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "ASANA_TAG"} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + logger.info(f"Calling tool: {name} with arguments: {arguments}") + + try: + if name == "asana_create_task": + result = await task_tools.create_task(**arguments) + elif name == "asana_get_task": + result = await task_tools.get_task_by_id(**arguments) + elif name == "asana_search_tasks": + # Convert string enums to proper enum types + sort_by = arguments.get("sort_by", "modified_at") + sort_order = arguments.get("sort_order", "descending") + + # Map string values to enum values + sort_by_map = { + "created_at": TaskSortBy.CREATED_AT, + "modified_at": TaskSortBy.MODIFIED_AT, + "due_date": TaskSortBy.DUE_DATE, + } + sort_order_map = { + "ascending": SortOrder.ASCENDING, + "descending": SortOrder.DESCENDING, + } + + arguments["sort_by"] = sort_by_map.get(sort_by, TaskSortBy.MODIFIED_AT) + arguments["sort_order"] = sort_order_map.get(sort_order, SortOrder.DESCENDING) + + result = await task_tools.search_tasks(**arguments) + elif name == "asana_update_task": + result = await task_tools.update_task(**arguments) + elif name == "asana_mark_task_completed": + result = await task_tools.mark_task_as_completed(**arguments) + elif name == "asana_get_subtasks": + result = await task_tools.get_subtasks_from_a_task(**arguments) + elif name == "asana_attach_file_to_task": + result = await task_tools.attach_file_to_task(**arguments) + elif name == "asana_get_projects": + result = await project_tools.list_projects(**arguments) + elif name == "asana_get_project": + result = await project_tools.get_project_by_id(**arguments) + elif name == "asana_get_workspaces": + result = await workspace_tools.list_workspaces(**arguments) + elif name == "asana_get_workspace": + result = await workspace_tools.get_workspace_by_id(**arguments) + elif name == "asana_get_users": + result = await user_tools.list_users(**arguments) + elif name == "asana_get_user": + result = await user_tools.get_user_by_id(**arguments) + elif name == "asana_get_teams": + result = await team_tools.list_teams(**arguments) + elif name == "asana_get_team": + result = await team_tools.get_team_by_id(**arguments) + elif name == "asana_get_user_teams": + result = await team_tools.list_teams_the_current_user_is_a_member_of(**arguments) + elif name == "asana_get_tags": + result = await tag_tools.list_tags(**arguments) + elif name == "asana_get_tag": + result = await tag_tools.get_tag_by_id(**arguments) + elif name == "asana_create_tag": + # Convert string color to enum if provided + if "color" in arguments and arguments["color"]: + color_map = { + "dark-pink": TagColor.DARK_PINK, + "dark-green": TagColor.DARK_GREEN, + "dark-blue": TagColor.DARK_BLUE, + "dark-red": TagColor.DARK_RED, + "dark-teal": TagColor.DARK_TEAL, + "dark-brown": TagColor.DARK_BROWN, + "dark-orange": TagColor.DARK_ORANGE, + "dark-purple": TagColor.DARK_PURPLE, + "dark-warm-gray": TagColor.DARK_WARM_GRAY, + "light-pink": TagColor.LIGHT_PINK, + "light-green": TagColor.LIGHT_GREEN, + "light-blue": TagColor.LIGHT_BLUE, + "light-red": TagColor.LIGHT_RED, + "light-teal": TagColor.LIGHT_TEAL, + "light-brown": TagColor.LIGHT_BROWN, + "light-orange": TagColor.LIGHT_ORANGE, + "light-purple": TagColor.LIGHT_PURPLE, + "light-warm-gray": TagColor.LIGHT_WARM_GRAY, + } + arguments["color"] = color_map.get(arguments["color"]) + result = await tag_tools.create_tag(**arguments) + else: + raise ValueError(f"Unknown tool: {name}") + + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + except AsanaToolExecutionError as e: + logger.error(f"Retryable error in {name}: {e}") + return [ + types.TextContent( + type="text", + text=json.dumps({ + "error": str(e), + "retry_after_ms": e.retry_after_ms, + "additional_prompt_content": e.additional_prompt_content, + "developer_message": e.developer_message, + }, indent=2) + ) + ] + except Exception as e: + logger.exception(f"Error in {name}: {e}") + return [ + types.TextContent( + type="text", + text=json.dumps({"error": str(e)}, indent=2) + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + try: + uvicorn.run( + starlette_app, + host="0.0.0.0", + port=port, + log_level=log_level.lower(), + ) + return 0 + except Exception as e: + logger.exception(f"Failed to start server: {e}") + return 1 + +if __name__ == "__main__": + exit(main()) \ No newline at end of file diff --git a/mcp_servers/asana/tools/__init__.py b/mcp_servers/asana/tools/__init__.py new file mode 100644 index 00000000..554446cd --- /dev/null +++ b/mcp_servers/asana/tools/__init__.py @@ -0,0 +1,9 @@ +"""Asana MCP Server Tools Package.""" + +from .base import auth_token_context, get_auth_token, get_asana_client + +__all__ = [ + "auth_token_context", + "get_auth_token", + "get_asana_client", +] \ No newline at end of file diff --git a/mcp_servers/asana/tools/base.py b/mcp_servers/asana/tools/base.py new file mode 100644 index 00000000..087a4578 --- /dev/null +++ b/mcp_servers/asana/tools/base.py @@ -0,0 +1,272 @@ +import asyncio +import json +from dataclasses import dataclass +import logging +from typing import Any, Dict, Optional, cast +from contextvars import ContextVar +from functools import wraps + +import httpx + +from .constants import ASANA_API_VERSION, ASANA_BASE_URL, ASANA_MAX_CONCURRENT_REQUESTS, ASANA_MAX_TIMEOUT_SECONDS + +# Configure logging +logger = logging.getLogger(__name__) + +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +# Type definitions +ToolResponse = dict[str, Any] + +# Exception classes (moved from utils.py) +class ToolExecutionError(Exception): + def __init__(self, message: str, developer_message: str = ""): + super().__init__(message) + self.developer_message = developer_message + + +class AsanaToolExecutionError(ToolExecutionError): + pass + + +class PaginationTimeoutError(AsanaToolExecutionError): + def __init__(self, timeout_seconds: int, tool_name: str): + message = f"Pagination timed out after {timeout_seconds} seconds" + super().__init__( + message=message, + developer_message=f"{message} while calling the tool {tool_name}", + ) + + +class RetryableToolError(Exception): + def __init__(self, message: str, additional_prompt_content: str = "", retry_after_ms: int = 1000, developer_message: str = ""): + super().__init__(message) + self.additional_prompt_content = additional_prompt_content + self.retry_after_ms = retry_after_ms + self.developer_message = developer_message + + +# Utility functions (moved from utils.py) +def remove_none_values(data: dict[str, Any]) -> dict[str, Any]: + return {k: v for k, v in data.items() if v is not None} + + +def get_next_page(response: dict[str, Any]) -> dict[str, Any]: + """Extract next page information from response.""" + next_page = response.get("next_page", {}) + return { + "next_page_token": next_page.get("uri") if next_page else None + } + + +# Decorator function (moved from utils.py) +def clean_asana_response(func): + def response_cleaner(data: dict[str, Any]) -> dict[str, Any]: + if "gid" in data: + data["id"] = data["gid"] + del data["gid"] + + for k, v in data.items(): + if isinstance(v, dict): + data[k] = response_cleaner(v) + elif isinstance(v, list): + data[k] = [ + item if not isinstance(item, dict) else response_cleaner(item) for item in v + ] + + return data + + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + response = await func(*args, **kwargs) + return response_cleaner(response) + + return wrapper + + +async def get_unique_workspace_id_or_raise_error() -> str: + client = get_asana_client() + + response = await client.get("/workspaces") + workspaces = response["data"] + + if len(workspaces) == 1: + return workspaces[0]["id"] + else: + workspaces_info = [{"name": ws["name"], "id": ws["id"]} for ws in workspaces] + message = "Multiple workspaces found. Please provide a workspace_id." + additional_prompt = f"Available workspaces: {json.dumps(workspaces_info)}" + raise RetryableToolError( + message=message, + developer_message=message, + additional_prompt_content=additional_prompt, + ) + + +@dataclass +class AsanaClient: + auth_token: str + base_url: str = ASANA_BASE_URL + api_version: str = ASANA_API_VERSION + max_concurrent_requests: int = ASANA_MAX_CONCURRENT_REQUESTS + _semaphore: asyncio.Semaphore | None = None + + def __post_init__(self) -> None: + self._semaphore = self._semaphore or asyncio.Semaphore(self.max_concurrent_requests) + + def _build_url(https://github.com/self,%20endpoint:%20str,%20api_version:%20str%20|%20None%20=%20None) -> str: + api_version = api_version or self.api_version + return f"{self.base_url.rstrip('/')}/{api_version.strip('/')}/{endpoint.lstrip('/')}" + + def _build_error_messages(self, response: httpx.Response) -> tuple[str, str]: + try: + data = response.json() + errors = data["errors"] + + if len(errors) == 1: + error_message = errors[0]["message"] + developer_message = ( + f"{errors[0]['message']} | {errors[0]['help']} " + f"(HTTP status code: {response.status_code})" + ) + else: + errors_concat = "', '".join([error["message"] for error in errors]) + error_message = f"Multiple errors occurred: '{errors_concat}'" + developer_message = ( + f"Multiple errors occurred: {json.dumps(errors)} " + f"(HTTP status code: {response.status_code})" + ) + + except Exception as e: + error_message = "Failed to parse Asana error response" + developer_message = f"Failed to parse Asana error response: {type(e).__name__}: {e!s}" + + return error_message, developer_message + + def _raise_for_status(self, response: httpx.Response) -> None: + if response.status_code < 300: + return + + error_message, developer_message = self._build_error_messages(response) + + raise AsanaToolExecutionError(error_message, developer_message) + + def _set_request_body(self, kwargs: dict, data: dict | None, json_data: dict | None) -> dict: + if data and json_data: + raise ValueError("Cannot provide both data and json_data") + + if data: + kwargs["data"] = data + + elif json_data: + kwargs["json"] = json_data + + return kwargs + + @clean_asana_response + async def get( + self, + endpoint: str, + params: Optional[dict] = None, + headers: Optional[dict] = None, + api_version: str | None = None, + ) -> dict: + default_headers = { + "Authorization": f"Bearer {self.auth_token}", + "Accept": "application/json", + } + headers = {**default_headers, **(headers or {})} + + kwargs = { + "url": self._build_url(https://github.com/endpoint,%20api_version), + "headers": headers, + } + + if params: + kwargs["params"] = params + + async with self._semaphore, httpx.AsyncClient() as client: # type: ignore[union-attr] + response = await client.get(**kwargs) # type: ignore[arg-type] + self._raise_for_status(response) + return cast(dict, response.json()) + + @clean_asana_response + async def post( + self, + endpoint: str, + data: Optional[dict] = None, + json_data: Optional[dict] = None, + files: Optional[dict] = None, + headers: Optional[dict] = None, + api_version: str | None = None, + ) -> dict: + default_headers = { + "Authorization": f"Bearer {self.auth_token}", + "Accept": "application/json", + } + + if files is None and json_data is not None: + default_headers["Content-Type"] = "application/json" + + headers = {**default_headers, **(headers or {})} + + kwargs = { + "url": self._build_url(https://github.com/endpoint,%20api_version), + "headers": headers, + } + + if files is not None: + kwargs["files"] = files + if data is not None: + kwargs["data"] = data + else: + kwargs = self._set_request_body(kwargs, data, json_data) + + async with self._semaphore, httpx.AsyncClient() as client: # type: ignore[union-attr] + response = await client.post(**kwargs) # type: ignore[arg-type] + self._raise_for_status(response) + return cast(dict, response.json()) + + @clean_asana_response + async def put( + self, + endpoint: str, + data: Optional[dict] = None, + json_data: Optional[dict] = None, + headers: Optional[dict] = None, + api_version: str | None = None, + ) -> dict: + headers = headers or {} + headers["Authorization"] = f"Bearer {self.auth_token}" + headers["Content-Type"] = "application/json" + headers["Accept"] = "application/json" + + kwargs = { + "url": self._build_url(https://github.com/endpoint,%20api_version), + "headers": headers, + } + + kwargs = self._set_request_body(kwargs, data, json_data) + + async with self._semaphore, httpx.AsyncClient() as client: # type: ignore[union-attr] + response = await client.put(**kwargs) # type: ignore[arg-type] + self._raise_for_status(response) + return cast(dict, response.json()) + + async def get_current_user(self) -> dict: + response = await self.get("/users/me") + return cast(dict, response["data"]) + + +def get_asana_client() -> AsanaClient: + """Create Asana client with access token from context.""" + access_token = get_auth_token() + return AsanaClient(auth_token=access_token) + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("Authentication token not found in request context") \ No newline at end of file diff --git a/mcp_servers/asana/tools/constants.py b/mcp_servers/asana/tools/constants.py new file mode 100644 index 00000000..c570da05 --- /dev/null +++ b/mcp_servers/asana/tools/constants.py @@ -0,0 +1,140 @@ +import os +from enum import Enum + +ASANA_BASE_URL = "https://app.asana.com/api" +ASANA_API_VERSION = "1.0" + +try: + ASANA_MAX_CONCURRENT_REQUESTS = int(os.getenv("ASANA_MAX_CONCURRENT_REQUESTS", 3)) +except ValueError: + ASANA_MAX_CONCURRENT_REQUESTS = 3 + +try: + ASANA_MAX_TIMEOUT_SECONDS = int(os.getenv("ASANA_MAX_TIMEOUT_SECONDS", 20)) +except ValueError: + ASANA_MAX_TIMEOUT_SECONDS = 20 + +MAX_PROJECTS_TO_SCAN_BY_NAME = 1000 +MAX_TAGS_TO_SCAN_BY_NAME = 1000 + +PROJECT_OPT_FIELDS = [ + "gid", + "resource_type", + "name", + "workspace", + "color", + "created_at", + "modified_at", + "current_status_update", + "due_on", + "members", + "notes", + "completed", + "completed_at", + "completed_by", + "owner", + "team", + "workspace", + "permalink_url", +] + +TASK_OPT_FIELDS = [ + "gid", + "name", + "notes", + "completed", + "completed_at", + "completed_by", + "created_at", + "created_by", + "due_on", + "start_on", + "owner", + "team", + "workspace", + "permalink_url", + "approval_status", + "assignee", + "assignee_status", + "dependencies", + "dependents", + "memberships", + "num_subtasks", + "resource_type", + "custom_type", + "custom_type_status_option", + "parent", + "tags", + "workspace", +] + + +TAG_OPT_FIELDS = [ + "gid", + "name", + "workspace", +] + +TEAM_OPT_FIELDS = [ + "gid", + "name", + "description", + "organization", + "permalink_url", +] + +USER_OPT_FIELDS = [ + "gid", + "resource_type", + "name", + "email", + "photo", + "workspaces", +] + +WORKSPACE_OPT_FIELDS = [ + "gid", + "resource_type", + "name", + "email_domains", + "is_organization", +] + + +class TaskSortBy(Enum): + DUE_DATE = "due_date" + CREATED_AT = "created_at" + COMPLETED_AT = "completed_at" + MODIFIED_AT = "modified_at" + LIKES = "likes" + + +class SortOrder(Enum): + ASCENDING = "ascending" + DESCENDING = "descending" + + +class TagColor(Enum): + DARK_GREEN = "dark-green" + DARK_RED = "dark-red" + DARK_BLUE = "dark-blue" + DARK_PURPLE = "dark-purple" + DARK_PINK = "dark-pink" + DARK_ORANGE = "dark-orange" + DARK_TEAL = "dark-teal" + DARK_BROWN = "dark-brown" + DARK_WARM_GRAY = "dark-warm-gray" + LIGHT_GREEN = "light-green" + LIGHT_RED = "light-red" + LIGHT_BLUE = "light-blue" + LIGHT_PURPLE = "light-purple" + LIGHT_PINK = "light-pink" + LIGHT_ORANGE = "light-orange" + LIGHT_TEAL = "light-teal" + LIGHT_BROWN = "light-brown" + LIGHT_WARM_GRAY = "light-warm-gray" + + +class ReturnType(Enum): + FULL_ITEMS_DATA = "full_items_data" + ITEMS_COUNT = "items_count" \ No newline at end of file diff --git a/mcp_servers/asana/tools/projects.py b/mcp_servers/asana/tools/projects.py new file mode 100644 index 00000000..48f87f0f --- /dev/null +++ b/mcp_servers/asana/tools/projects.py @@ -0,0 +1,188 @@ +from typing import Annotated, Any, Dict +import logging +from datetime import datetime, timezone + +from .constants import PROJECT_OPT_FIELDS +from .base import ( + get_asana_client, + get_next_page, + get_unique_workspace_id_or_raise_error, + remove_none_values, + AsanaToolExecutionError, +) + +logger = logging.getLogger(__name__) + + +def parse_timestamp(timestamp_str: str) -> datetime: + """Parse ISO 8601 timestamp string to datetime object. Always returns timezone-aware datetime.""" + try: + # Handle both with and without timezone info + if timestamp_str.endswith('Z'): + timestamp_str = timestamp_str[:-1] + '+00:00' + + dt = datetime.fromisoformat(timestamp_str) + + # If the datetime is naive (no timezone), assume UTC + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + return dt + except (ValueError, AttributeError): + return None + + +def filter_projects_by_timestamps(projects: list, filter_dict: Dict[str, Any]) -> list: + """Filter projects based on timestamp criteria.""" + if not filter_dict: + return projects + + filtered_projects = [] + + for project in projects: + include_project = True + + # Filter by created_at + if "created_at" in filter_dict: + project_created = parse_timestamp(project.get("created_at", "")) + if project_created: + created_filter = filter_dict["created_at"] + + # Support both gt (greater than) and gte (greater than or equal) + if "gt" in created_filter: + filter_date = parse_timestamp(created_filter["gt"]) + if filter_date and project_created <= filter_date: + include_project = False + elif "gte" in created_filter: + filter_date = parse_timestamp(created_filter["gte"]) + if filter_date and project_created < filter_date: + include_project = False + + # Support both lt (less than) and lte (less than or equal) + if "lt" in created_filter: + filter_date = parse_timestamp(created_filter["lt"]) + if filter_date and project_created >= filter_date: + include_project = False + elif "lte" in created_filter: + filter_date = parse_timestamp(created_filter["lte"]) + if filter_date and project_created > filter_date: + include_project = False + + # Filter by modified_at + if include_project and "modified_at" in filter_dict: + project_modified = parse_timestamp(project.get("modified_at", "")) + if project_modified: + modified_filter = filter_dict["modified_at"] + + # Support both gt (greater than) and gte (greater than or equal) + if "gt" in modified_filter: + filter_date = parse_timestamp(modified_filter["gt"]) + if filter_date and project_modified <= filter_date: + include_project = False + elif "gte" in modified_filter: + filter_date = parse_timestamp(modified_filter["gte"]) + if filter_date and project_modified < filter_date: + include_project = False + + # Support both lt (less than) and lte (less than or equal) + if "lt" in modified_filter: + filter_date = parse_timestamp(modified_filter["lt"]) + if filter_date and project_modified >= filter_date: + include_project = False + elif "lte" in modified_filter: + filter_date = parse_timestamp(modified_filter["lte"]) + if filter_date and project_modified > filter_date: + include_project = False + + if include_project: + filtered_projects.append(project) + + return filtered_projects + + +async def get_project_by_id( + project_id: str, +) -> Dict[str, Any]: + """Get a project by its ID""" + try: + client = get_asana_client() + response = await client.get( + f"/projects/{project_id}", + params={"opt_fields": ",".join(PROJECT_OPT_FIELDS)}, + ) + return {"project": response["data"]} + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing get_project_by_id: {e}") + raise e + + +async def list_projects( + team_id: str | None = None, + workspace_id: str | None = None, + limit: int = 100, + next_page_token: str | None = None, + filter: Dict[str, Any] | None = None, +) -> Dict[str, Any]: + """List projects in Asana with optional filtering by timestamps. + + Args: + team_id: Optional team ID to filter projects + workspace_id: Optional workspace ID (defaults to unique workspace if not provided) + limit: Maximum number of projects to return (1-100) + next_page_token: Token for pagination + filter: Optional filter dictionary with timestamp filters: + - created_at: Filter by creation date with gt/gte/lt/lte operators + - modified_at: Filter by modification date with gt/gte/lt/lte operators + Example: {"modified_at": {"gte": "2024-01-01T00:00:00Z"}} + + Returns: + Dictionary containing filtered projects, count, and pagination info + """ + try: + # Note: Asana recommends filtering by team to avoid timeout in large domains. + # Ref: https://developers.asana.com/reference/getprojects + limit = max(1, min(100, limit)) + + workspace_id = workspace_id or await get_unique_workspace_id_or_raise_error() + + client = get_asana_client() + + # If filtering is requested and pagination is involved, we need to fetch more data + # to ensure we have enough filtered results + fetch_limit = limit if not filter else min(100, limit * 3) + + response = await client.get( + "/projects", + params=remove_none_values({ + "limit": fetch_limit, + "offset": next_page_token, + "team": team_id, + "workspace": workspace_id, + "opt_fields": ",".join(PROJECT_OPT_FIELDS), + }), + ) + + projects = response["data"] + + # Apply client-side filtering if filter is provided + if filter: + projects = filter_projects_by_timestamps(projects, filter) + # Trim to requested limit after filtering + projects = projects[:limit] + + return { + "projects": projects, + "count": len(projects), + "next_page": get_next_page(response) if not filter else None, # Pagination is complex with filtering + } + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing list_projects: {e}") + raise e diff --git a/mcp_servers/asana/tools/tags.py b/mcp_servers/asana/tools/tags.py new file mode 100644 index 00000000..cfd60ada --- /dev/null +++ b/mcp_servers/asana/tools/tags.py @@ -0,0 +1,98 @@ +from typing import Annotated, Any, Dict +import logging + +from .constants import TAG_OPT_FIELDS, TagColor +from .base import ( + get_asana_client, + get_next_page, + get_unique_workspace_id_or_raise_error, + remove_none_values, + AsanaToolExecutionError, +) + +logger = logging.getLogger(__name__) + + +async def get_tag_by_id( + tag_id: str, +) -> Dict[str, Any]: + """Get an Asana tag by its ID""" + try: + client = get_asana_client() + response = await client.get(f"/tags/{tag_id}") + return {"tag": response["data"]} + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing get_tag_by_id: {e}") + raise e + + +async def create_tag( + name: str, + description: str | None = None, + color: TagColor | None = None, + workspace_id: str | None = None, +) -> Dict[str, Any]: + """Create a tag in Asana""" + try: + if not 1 <= len(name) <= 100: + raise ValueError("Tag name must be between 1 and 100 characters long.") + + workspace_id = workspace_id or await get_unique_workspace_id_or_raise_error() + + data = remove_none_values({ + "name": name, + "notes": description, + "color": color.value if color else None, + "workspace": workspace_id, + }) + + client = get_asana_client() + response = await client.post("/tags", json_data={"data": data}) + return {"tag": response["data"]} + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing create_tag: {e}") + raise e + + +async def list_tags( + workspace_id: str | None = None, + limit: int = 100, + next_page_token: str | None = None, +) -> Dict[str, Any]: + """List tags in an Asana workspace""" + try: + limit = max(1, min(100, limit)) + + workspace_id = workspace_id or await get_unique_workspace_id_or_raise_error() + + client = get_asana_client() + response = await client.get( + "/tags", + params=remove_none_values({ + "limit": limit, + "offset": next_page_token, + "workspace": workspace_id, + "opt_fields": ",".join(TAG_OPT_FIELDS), + }), + ) + + return { + "tags": response["data"], + "count": len(response["data"]), + "next_page": get_next_page(response), + } + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing list_tags: {e}") + raise e diff --git a/mcp_servers/asana/tools/tasks.py b/mcp_servers/asana/tools/tasks.py new file mode 100644 index 00000000..30f401fe --- /dev/null +++ b/mcp_servers/asana/tools/tasks.py @@ -0,0 +1,651 @@ +import base64 +from typing import Annotated, Any, Dict, cast +import logging +import json +from datetime import datetime + +from .constants import TASK_OPT_FIELDS, SortOrder, TaskSortBy, MAX_PROJECTS_TO_SCAN_BY_NAME, MAX_TAGS_TO_SCAN_BY_NAME +from .base import ( + get_asana_client, + get_next_page, + remove_none_values, + get_unique_workspace_id_or_raise_error, + AsanaToolExecutionError, + RetryableToolError, +) + +logger = logging.getLogger(__name__) + + +def validate_date_format(name: str, date_str: str | None) -> None: + if not date_str: + return + + try: + datetime.strptime(date_str, "%Y-%m-%d") + except ValueError: + raise AsanaToolExecutionError(f"Invalid {name} date format. Use the format YYYY-MM-DD.") + + +def build_task_search_query_params( + keywords: str | None, + completed: bool | None, + assignee_id: str | None, + project_id: str | None, + team_id: str | None, + tag_ids: list[str] | None, + due_on: str | None, + due_on_or_after: str | None, + due_on_or_before: str | None, + start_on: str | None, + start_on_or_after: str | None, + start_on_or_before: str | None, + limit: int, + sort_by: TaskSortBy, + sort_order: SortOrder, +) -> dict[str, Any]: + query_params: dict[str, Any] = { + "text": keywords, + "opt_fields": ",".join(TASK_OPT_FIELDS), + "sort_by": sort_by.value, + "sort_ascending": sort_order == SortOrder.ASCENDING, + "limit": limit, + } + if completed is not None: + query_params["completed"] = completed + if assignee_id: + query_params["assignee.any"] = assignee_id + if project_id: + query_params["projects.any"] = project_id + if team_id: + query_params["team.any"] = team_id + if tag_ids: + query_params["tags.any"] = ",".join(tag_ids) + + query_params = add_task_search_date_params( + query_params, + due_on, + due_on_or_after, + due_on_or_before, + start_on, + start_on_or_after, + start_on_or_before, + ) + + return query_params + + +def add_task_search_date_params( + query_params: dict[str, Any], + due_on: str | None, + due_on_or_after: str | None, + due_on_or_before: str | None, + start_on: str | None, + start_on_or_after: str | None, + start_on_or_before: str | None, +) -> dict[str, Any]: + """ + Builds the date-related query parameters for task search. + + If a date is provided, it will be added to the query parameters. If not, it will be ignored. + """ + if due_on: + query_params["due_on"] = due_on + if due_on_or_after: + query_params["due_on.after"] = due_on_or_after + if due_on_or_before: + query_params["due_on.before"] = due_on_or_before + if start_on: + query_params["start_on"] = start_on + if start_on_or_after: + query_params["start_on.after"] = start_on_or_after + if start_on_or_before: + query_params["start_on.before"] = start_on_or_before + + return query_params + + +async def handle_new_task_associations( + parent_task_id: str | None, + project: str | None, + workspace_id: str | None, +) -> tuple[str | None, str | None, str | None]: + """ + Handles the association of a new task to a parent task, project, or workspace. + + If no association is provided, it will try to find a workspace in the user's account. + In case the user has only one workspace, it will use that workspace. + Otherwise, it will raise an error. + + If a workspace_id is not provided, but a parent_task_id or a project_id is provided, it will try + to find the workspace associated with the parent task or project. + + In each of the two cases explained above, if a workspace is found, the function will return this + value, even if the workspace_id argument was None. + + Returns a tuple of (parent_task_id, project_id, workspace_id). + """ + project_id, project_name = (None, None) + + if project: + if project.isnumeric(): + project_id = project + else: + project_name = project + + if project_name: + project_data = await get_project_by_name_or_raise_error(project_name) + project_id = project_data["id"] + workspace_id = project_data["workspace"]["id"] + + if not any([parent_task_id, project_id, workspace_id]): + workspace_id = await get_unique_workspace_id_or_raise_error() + + if not workspace_id and parent_task_id: + client = get_asana_client() + response = await client.get(f"/tasks/{parent_task_id}", params={"opt_fields": "workspace"}) + workspace_id = response["data"]["workspace"]["id"] + + return parent_task_id, project_id, workspace_id + + +async def get_project_by_name_or_raise_error( + project_name: str, + max_items_to_scan: int = MAX_PROJECTS_TO_SCAN_BY_NAME, +) -> dict[str, Any]: + response = await find_projects_by_name( + names=[project_name], + response_limit=100, + max_items_to_scan=max_items_to_scan, + return_projects_not_matched=True, + ) + + if not response["matches"]["projects"]: + projects = response["not_matched"]["projects"] + projects = [{"name": project["name"], "id": project["id"]} for project in projects] + message = ( + f"Project with name '{project_name}' was not found. The search scans up to " + f"{max_items_to_scan} projects. If the user account has a larger number of projects, " + "it's possible that it exists, but the search didn't find it." + ) + additional_prompt = f"Projects available: {json.dumps(projects)}" + raise RetryableToolError( + message=message, + developer_message=f"{message} {additional_prompt}", + additional_prompt_content=additional_prompt, + ) + + elif response["matches"]["count"] > 1: + projects = [ + {"name": project["name"], "id": project["id"]} + for project in response["matches"]["projects"] + ] + message = "Multiple projects found with the same name. Please provide a project ID instead." + additional_prompt = f"Projects matching the name '{project_name}': {json.dumps(projects)}" + raise RetryableToolError( + message=message, + developer_message=message, + additional_prompt_content=additional_prompt, + ) + + return cast(dict, response["matches"]["projects"][0]) + + +async def handle_new_task_tags( + tags: list[str] | None, + workspace_id: str | None, +) -> list[str] | None: + if not tags: + return None + + tag_ids = [] + tag_names = [] + for tag in tags: + if tag.isnumeric(): + tag_ids.append(tag) + else: + tag_names.append(tag) + + if tag_names: + response = await find_tags_by_name(tag_names) + tag_ids.extend([tag["id"] for tag in response["matches"]["tags"]]) + + if response["not_found"]["tags"]: + client = get_asana_client() + + created_tags = [] + for name in response["not_found"]["tags"]: + tag_data = {"name": name, "workspace": workspace_id} + create_response = await client.post("/tags", json_data={"data": tag_data}) + created_tags.append(create_response["data"]["id"]) + + tag_ids.extend(created_tags) + + return tag_ids + + +async def get_tag_ids( + tags: list[str] | None, + max_items_to_scan: int = MAX_TAGS_TO_SCAN_BY_NAME, +) -> list[str] | None: + """ + Returns the IDs of the tags provided in the tags list, which can be either tag IDs or tag names. + + If the tags list is empty, it returns None. + """ + tag_ids = [] + tag_names = [] + + if tags: + for tag in tags: + if tag.isnumeric(): + tag_ids.append(tag) + else: + tag_names.append(tag) + + if tag_names: + searched_tags = await find_tags_by_name( + tag_names, + max_items_to_scan=max_items_to_scan, + return_tags_not_matched=False, + ) + tag_ids.extend([tag["id"] for tag in searched_tags["matches"]["tags"]]) + + return tag_ids if tag_ids else None + + +async def find_projects_by_name( + names: list[str], + team_id: list[str] | None = None, + response_limit: int = 100, + max_items_to_scan: int = MAX_PROJECTS_TO_SCAN_BY_NAME, + return_projects_not_matched: bool = False, +) -> dict[str, Any]: + """Find projects by name.""" + client = get_asana_client() + + # Get all workspaces first + workspaces_response = await client.get("/workspaces") + workspaces = workspaces_response["data"] + + all_projects = [] + + # Search through all workspaces + for workspace in workspaces: + projects_response = await client.get( + f"/workspaces/{workspace['id']}/projects", + params={"limit": min(response_limit, max_items_to_scan)} + ) + all_projects.extend(projects_response["data"]) + + if len(all_projects) >= max_items_to_scan: + break + + # Match projects by name + matches = [] + not_matched = [] + + for name in names: + found = False + for project in all_projects: + if project["name"].lower() == name.lower(): + matches.append(project) + found = True + break + if not found: + not_matched.append(name) + + result = { + "matches": { + "projects": matches, + "count": len(matches) + } + } + + if return_projects_not_matched: + result["not_matched"] = { + "projects": all_projects, + "tags": not_matched + } + + return result + + +async def find_tags_by_name( + names: list[str], + workspace_id: list[str] | None = None, + response_limit: int = 100, + max_items_to_scan: int = MAX_TAGS_TO_SCAN_BY_NAME, + return_tags_not_matched: bool = False, +) -> dict[str, Any]: + """Find tags by name.""" + client = get_asana_client() + + # Get all workspaces first + if not workspace_id: + workspaces_response = await client.get("/workspaces") + workspaces = workspaces_response["data"] + else: + workspaces = [{"id": wid} for wid in workspace_id] + + all_tags = [] + + # Search through all workspaces + for workspace in workspaces: + tags_response = await client.get( + f"/workspaces/{workspace['id']}/tags", + params={"limit": min(response_limit, max_items_to_scan)} + ) + all_tags.extend(tags_response["data"]) + + if len(all_tags) >= max_items_to_scan: + break + + # Match tags by name + matches = [] + not_found = [] + + for name in names: + found = False + for tag in all_tags: + if tag["name"].lower() == name.lower(): + matches.append(tag) + found = True + break + if not found: + not_found.append(name) + + result = { + "matches": { + "tags": matches, + "count": len(matches) + }, + "not_found": { + "tags": not_found + } + } + + if return_tags_not_matched: + result["not_matched"] = { + "tags": all_tags + } + + return result + +async def search_tasks( + keywords: str | None = None, + workspace_id: str | None = None, + assignee_id: str | None = None, + project: str | None = None, + team_id: str | None = None, + tags: list[str] | None = None, + due_on: str | None = None, + due_on_or_after: str | None = None, + due_on_or_before: str | None = None, + start_on: str | None = None, + start_on_or_after: str | None = None, + start_on_or_before: str | None = None, + completed: bool | None = None, + limit: int = 100, + sort_by: TaskSortBy = TaskSortBy.MODIFIED_AT, + sort_order: SortOrder = SortOrder.DESCENDING, +) -> Dict[str, Any]: + """Search for tasks""" + try: + limit = max(1, min(100, limit)) + project_id = None + + if project: + if project.isnumeric(): + project_id = project + else: + project_data = await get_project_by_name_or_raise_error(project) + project_id = project_data["id"] + if not workspace_id: + workspace_id = project_data["workspace"]["id"] + + tag_ids = await get_tag_ids(tags) + + client = get_asana_client() + + validate_date_format("due_on", due_on) + validate_date_format("due_on_or_after", due_on_or_after) + validate_date_format("due_on_or_before", due_on_or_before) + validate_date_format("start_on", start_on) + validate_date_format("start_on_or_after", start_on_or_after) + validate_date_format("start_on_or_before", start_on_or_before) + + if not any([workspace_id, project_id, team_id]): + workspace_id = await get_unique_workspace_id_or_raise_error() + + if not workspace_id and team_id: + from .teams import get_team_by_id + team = await get_team_by_id(team_id) + workspace_id = team["organization"]["id"] + + response = await client.get( + f"/workspaces/{workspace_id}/tasks/search", + params=build_task_search_query_params( + keywords=keywords, + completed=completed, + assignee_id=assignee_id, + project_id=project_id, + team_id=team_id, + tag_ids=tag_ids, + due_on=due_on, + due_on_or_after=due_on_or_after, + due_on_or_before=due_on_or_before, + start_on=start_on, + start_on_or_after=start_on_or_after, + start_on_or_before=start_on_or_before, + limit=limit, + sort_by=sort_by, + sort_order=sort_order, + ), + ) + + tasks_by_id = {task["id"]: task for task in response["data"]} + tasks = list(tasks_by_id.values()) + + return {"tasks": tasks, "count": len(tasks)} + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing search_tasks: {e}") + raise e + + +async def get_task_by_id( + task_id: str, + max_subtasks: int = 100, +) -> Dict[str, Any]: + """Get a task by its ID""" + try: + client = get_asana_client() + response = await client.get( + f"/tasks/{task_id}", + params={"opt_fields": ",".join(TASK_OPT_FIELDS)}, + ) + if max_subtasks > 0: + max_subtasks = min(max_subtasks, 100) + subtasks = await get_subtasks_from_a_task(task_id=task_id, limit=max_subtasks) + response["data"]["subtasks"] = subtasks["subtasks"] + return {"task": response["data"]} + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing get_task_by_id: {e}") + raise e + + +async def get_subtasks_from_a_task( + task_id: str, + limit: int = 100, + next_page_token: str | None = None, +) -> Dict[str, Any]: + """Get subtasks from a task""" + try: + limit = max(1, min(100, limit)) + client = get_asana_client() + response = await client.get( + f"/tasks/{task_id}/subtasks", + params=remove_none_values({ + "limit": limit, + "offset": next_page_token, + "opt_fields": ",".join(TASK_OPT_FIELDS), + }), + ) + + return { + "subtasks": response["data"], + "count": len(response["data"]), + "next_page": get_next_page(response), + } + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing get_subtasks_from_a_task: {e}") + raise e + + +async def update_task( + task_id: str, + name: str | None = None, + completed: bool | None = None, + start_date: str | None = None, + due_date: str | None = None, + description: str | None = None, + assignee_id: str | None = None, +) -> Dict[str, Any]: + """Update a task in Asana""" + try: + client = get_asana_client() + + validate_date_format("start_date", start_date) + validate_date_format("due_date", due_date) + + update_data = remove_none_values({ + "name": name, + "completed": completed, + "start_on": start_date, + "due_on": due_date, + "notes": description, + "assignee": assignee_id, + }) + + response = await client.put(f"/tasks/{task_id}", json_data={"data": update_data}) + return {"task": response["data"]} + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing update_task: {e}") + raise e + + +async def mark_task_as_completed( + task_id: str, +) -> Dict[str, Any]: + """Mark a task as completed""" + return await update_task(task_id, completed=True) + + +async def create_task( + name: str, + start_date: str | None = None, + due_date: str | None = None, + description: str | None = None, + parent_task_id: str | None = None, + workspace_id: str | None = None, + project: str | None = None, + assignee_id: str | None = "me", + tags: list[str] | None = None, +) -> Dict[str, Any]: + """Create a task in Asana""" + try: + client = get_asana_client() + + validate_date_format("start_date", start_date) + validate_date_format("due_date", due_date) + + parent_task_id, project_id, workspace_id = await handle_new_task_associations( + parent_task_id, project, workspace_id + ) + + tag_ids = await handle_new_task_tags(tags, workspace_id) + + task_data = remove_none_values({ + "name": name, + "notes": description, + "start_on": start_date, + "due_on": due_date, + "assignee": assignee_id, + "parent": parent_task_id, + "projects": [project_id] if project_id else None, + "workspace": workspace_id, + "tags": tag_ids, + }) + + response = await client.post("/tasks", json_data={"data": task_data}) + return {"task": response["data"]} + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing create_task: {e}") + raise e + + +async def attach_file_to_task( + task_id: str, + file_name: str, + file_content_str: str | None = None, + file_content_base64: str | None = None, + file_content_url: str | None = None, + file_encoding: str = "utf-8", +) -> Dict[str, Any]: + """Attach a file to a task""" + try: + client = get_asana_client() + + if not any([file_content_str, file_content_base64, file_content_url]): + raise ValueError("One of file_content_str, file_content_base64, or file_content_url must be provided") + + if file_content_url: + # External URL attachment + attachment_data = { + "name": file_name, + "url": file_content_url, + "parent": task_id, + } + response = await client.post("/attachments", json_data={"data": attachment_data}) + else: + # File upload + if file_content_str: + file_content = file_content_str.encode(file_encoding) + elif file_content_base64: + file_content = base64.b64decode(file_content_base64) + + files = { + "file": (file_name, file_content) + } + data = {"parent": task_id} + + response = await client.post("/attachments", data=data, files=files) + + return {"attachment": response["data"]} + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing attach_file_to_task: {e}") + raise e diff --git a/mcp_servers/asana/tools/teams.py b/mcp_servers/asana/tools/teams.py new file mode 100644 index 00000000..d7b27227 --- /dev/null +++ b/mcp_servers/asana/tools/teams.py @@ -0,0 +1,104 @@ +from typing import Annotated, Any, Dict +import logging + +from .constants import TEAM_OPT_FIELDS +from .base import ( + get_asana_client, + get_next_page, + get_unique_workspace_id_or_raise_error, + remove_none_values, + AsanaToolExecutionError, +) + +logger = logging.getLogger(__name__) + + +async def get_team_by_id( + team_id: str, +) -> Dict[str, Any]: + """Get an Asana team by its ID""" + try: + client = get_asana_client() + response = await client.get( + f"/teams/{team_id}", + params=remove_none_values({"opt_fields": ",".join(TEAM_OPT_FIELDS)}), + ) + return {"team": response["data"]} + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing get_team_by_id: {e}") + raise e + + +async def list_teams_the_current_user_is_a_member_of( + workspace_id: str | None = None, + limit: int = 100, + next_page_token: str | None = None, +) -> Dict[str, Any]: + """List teams in Asana that the current user is a member of""" + try: + limit = max(1, min(100, limit)) + + workspace_id = workspace_id or await get_unique_workspace_id_or_raise_error() + + client = get_asana_client() + response = await client.get( + "/users/me/teams", + params=remove_none_values({ + "limit": limit, + "offset": next_page_token, + "opt_fields": ",".join(TEAM_OPT_FIELDS), + "organization": workspace_id, + }), + ) + + return { + "teams": response["data"], + "count": len(response["data"]), + "next_page": get_next_page(response), + } + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing list_teams_the_current_user_is_a_member_of: {e}") + raise e + + +async def list_teams( + workspace_id: str | None = None, + limit: int = 100, + next_page_token: str | None = None, +) -> Dict[str, Any]: + """List teams in an Asana workspace""" + try: + limit = max(1, min(100, limit)) + + workspace_id = workspace_id or await get_unique_workspace_id_or_raise_error() + + client = get_asana_client() + response = await client.get( + f"/workspaces/{workspace_id}/teams", + params=remove_none_values({ + "limit": limit, + "offset": next_page_token, + "opt_fields": ",".join(TEAM_OPT_FIELDS), + }), + ) + + return { + "teams": response["data"], + "count": len(response["data"]), + "next_page": get_next_page(response), + } + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing list_teams: {e}") + raise e diff --git a/mcp_servers/asana/tools/users.py b/mcp_servers/asana/tools/users.py new file mode 100644 index 00000000..8fa4cbef --- /dev/null +++ b/mcp_servers/asana/tools/users.py @@ -0,0 +1,67 @@ +from typing import Annotated, Any, Dict +import logging + +from .constants import USER_OPT_FIELDS +from .base import ( + get_asana_client, + get_next_page, + get_unique_workspace_id_or_raise_error, + remove_none_values, + AsanaToolExecutionError, +) + +logger = logging.getLogger(__name__) + + +async def list_users( + workspace_id: str | None = None, + limit: int = 100, + next_page_token: str | None = None, +) -> Dict[str, Any]: + """List users in Asana""" + try: + limit = max(1, min(100, limit)) + + if not workspace_id: + workspace_id = await get_unique_workspace_id_or_raise_error() + + client = get_asana_client() + response = await client.get( + "/users", + params=remove_none_values({ + "workspace": workspace_id, + "limit": limit, + "offset": next_page_token, + "opt_fields": ",".join(USER_OPT_FIELDS), + }), + ) + + return { + "users": response["data"], + "count": len(response["data"]), + "next_page": get_next_page(response), + } + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing list_users: {e}") + raise e + + +async def get_user_by_id( + user_id: str, +) -> Dict[str, Any]: + """Get a user by ID""" + try: + client = get_asana_client() + response = await client.get(f"/users/{user_id}", params={"opt_fields": ",".join(USER_OPT_FIELDS)}) + return {"user": response["data"]} + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing get_user_by_id: {e}") + raise e diff --git a/mcp_servers/asana/tools/workspaces.py b/mcp_servers/asana/tools/workspaces.py new file mode 100644 index 00000000..2d6bdf06 --- /dev/null +++ b/mcp_servers/asana/tools/workspaces.py @@ -0,0 +1,61 @@ +from typing import Any, Dict +import logging + +from .constants import WORKSPACE_OPT_FIELDS +from .base import ( + get_asana_client, + get_next_page, + remove_none_values, + AsanaToolExecutionError, +) + +logger = logging.getLogger(__name__) + + +async def get_workspace_by_id( + workspace_id: str, +) -> Dict[str, Any]: + """Get an Asana workspace by its ID""" + try: + client = get_asana_client() + response = await client.get(f"/workspaces/{workspace_id}") + return {"workspace": response["data"]} + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing get_workspace_by_id: {e}") + raise e + + +async def list_workspaces( + limit: int = 100, + next_page_token: str | None = None, +) -> Dict[str, Any]: + """List workspaces in Asana that are visible to the authenticated user""" + try: + limit = max(1, min(100, limit)) + + client = get_asana_client() + response = await client.get( + "/workspaces", + params=remove_none_values({ + "limit": limit, + "offset": next_page_token, + "opt_fields": ",".join(WORKSPACE_OPT_FIELDS), + }), + ) + + return { + "workspaces": response["data"], + "count": len(response["data"]), + "next_page": get_next_page(response), + } + + except AsanaToolExecutionError as e: + logger.error(f"Asana API error: {e}") + raise RuntimeError(f"Asana API Error: {e}") + except Exception as e: + logger.exception(f"Error executing list_workspaces: {e}") + raise e diff --git a/mcp_servers/attio/.env.example b/mcp_servers/attio/.env.example new file mode 100644 index 00000000..c7b37ae6 --- /dev/null +++ b/mcp_servers/attio/.env.example @@ -0,0 +1,2 @@ +# Attio API credentials +ATTIO_API_KEY=your-actual-api-key-here diff --git a/mcp_servers/attio/.eslintrc.json b/mcp_servers/attio/.eslintrc.json new file mode 100644 index 00000000..390256c5 --- /dev/null +++ b/mcp_servers/attio/.eslintrc.json @@ -0,0 +1,13 @@ +{ + "root": false, + "extends": [ + "../.eslintrc.js" + ], + "parserOptions": { + "tsconfigRootDir": ".", + "project": "./tsconfig.json" + }, + "rules": { + // Package-specific rules can go here + } +} \ No newline at end of file diff --git a/mcp_servers/attio/Dockerfile b/mcp_servers/attio/Dockerfile new file mode 100644 index 00000000..cb703018 --- /dev/null +++ b/mcp_servers/attio/Dockerfile @@ -0,0 +1,35 @@ +# Use a Node.js image as the base for building the application +FROM node:22-alpine AS builder + +# Set the working directory inside the container +WORKDIR /app + +# Copy package.json and package-lock.json to install dependencies +COPY mcp_servers/attio/package.json mcp_servers/attio/package-lock.json ./ + +# Install dependencies (ignoring scripts to prevent running the prepare script) +RUN npm install --ignore-scripts + +# Copy the rest of the application source code +COPY mcp_servers/attio . + +# Build the application using TypeScript +RUN npm run build + +# Use a smaller Node.js image for the final image +FROM node:22-slim AS release + +# Set the working directory inside the container +WORKDIR /app + +# Copy the built application from the builder stage +COPY --from=builder /app/dist /app/dist +COPY --from=builder /app/package.json /app/package.json +COPY --from=builder /app/package-lock.json /app/package-lock.json +# Install only production dependencies +RUN npm ci --omit=dev --ignore-scripts + +EXPOSE 5000 + +# Specify the command to run the application +ENTRYPOINT ["node", "dist/index.js"] \ No newline at end of file diff --git a/mcp_servers/attio/README.md b/mcp_servers/attio/README.md new file mode 100644 index 00000000..37a60b8b --- /dev/null +++ b/mcp_servers/attio/README.md @@ -0,0 +1,78 @@ +# Attio MCP Server + +A Model Context Protocol (MCP) server for Attio CRM integration. Manage contacts, deals, and customer relationships using Attio's modern CRM API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Attio with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("ATTIO", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/attio-mcp-server:latest + + +# Run Attio MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/attio-mcp-server:latest + + +# Run Attio MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_attio_api_token_here"}' \ + ghcr.io/klavis-ai/attio-mcp-server:latest +``` + +**OAuth Setup:** Attio requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Contact Management**: Create and manage person and company records +- **Deal Pipeline**: Handle opportunities and sales processes +- **Custom Objects**: Work with custom data models and fields +- **Activity Tracking**: Track interactions and communications +- **Workflow Automation**: Manage automated workflows and triggers + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/attio/index.ts b/mcp_servers/attio/index.ts new file mode 100644 index 00000000..16015bf5 --- /dev/null +++ b/mcp_servers/attio/index.ts @@ -0,0 +1,1095 @@ +import express, { Request, Response } from 'express'; +import { Server } from '@modelcontextprotocol/sdk/server/index.js'; +import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js'; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; +import { + Tool, + CallToolRequestSchema, + ListToolsRequestSchema, +} from '@modelcontextprotocol/sdk/types.js'; +import { AsyncLocalStorage } from 'async_hooks'; +import dotenv from 'dotenv'; + +dotenv.config(); + +// Attio API configuration +const ATTIO_API_URL = 'https://api.attio.com/v2'; + +// Create AsyncLocalStorage for request context +const asyncLocalStorage = new AsyncLocalStorage<{ + attioClient: AttioClient; +}>(); + +// Attio API Client +class AttioClient { + private accessToken: string; + private baseUrl: string; + + constructor(accessToken: string, baseUrl: string = ATTIO_API_URL) { + this.accessToken = accessToken; + this.baseUrl = baseUrl; + } + + private async makeRequest(endpoint: string, options: RequestInit = {}): Promise { + const url = `${this.baseUrl}${endpoint}`; + const headers = { + 'Authorization': `Bearer ${this.accessToken}`, + 'Content-Type': 'application/json', + ...options.headers, + }; + + const response = await fetch(url, { + ...options, + headers, + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error(`Attio API error: ${response.status} ${response.statusText} - ${errorText}`); + } + + return response.json(); + } + + async searchPeople(filters: any = {}, limit: number = 25): Promise { + if (Object.keys(filters).length === 0) { + return this.makeRequest('/objects/people/records/query', { + method: 'POST', + body: JSON.stringify({ + limit + }), + }); + } + return this.makeRequest('/objects/people/records/query', { + method: 'POST', + body: JSON.stringify({ + filter: filters, + limit + }), + }); + } + + async searchCompanies(filters: any = {}, limit: number = 25): Promise { + if (Object.keys(filters).length === 0) { + return this.makeRequest('/objects/companies/records/query', { + method: 'POST', + body: JSON.stringify({ + limit + }), + }); + } + return this.makeRequest('/objects/companies/records/query', { + method: 'POST', + body: JSON.stringify({ + filter: filters, + limit + }), + }); + } + + async searchDeals(filters: any = {}, limit: number = 25): Promise { + if (Object.keys(filters).length === 0) { + return this.makeRequest('/objects/deals/records/query', { + method: 'POST', + body: JSON.stringify({ + limit + }), + }); + } + return this.makeRequest('/objects/deals/records/query', { + method: 'POST', + body: JSON.stringify({ + filter: filters, + limit + }), + }); + } + + async searchNotes(query: string, limit: number = 50): Promise { + // First, get all notes (up to the limit) + const allNotes = await this.makeRequest(`/notes?limit=${limit}`, { + method: 'GET', + }); + + // If no query provided, return all notes + if (!query || query.trim() === '') { + return allNotes; + } + + // Filter notes based on query matching title or content + const queryLower = query.toLowerCase(); + const filteredNotes = allNotes.data.filter((note: any) => { + const titleMatch = note.title?.toLowerCase().includes(queryLower); + const contentPlaintextMatch = note.content_plaintext?.toLowerCase().includes(queryLower); + const contentMarkdownMatch = note.content_markdown?.toLowerCase().includes(queryLower); + + return titleMatch || contentPlaintextMatch || contentMarkdownMatch; + }); + + return { + ...allNotes, + data: filteredNotes + }; + } + + async createNote(data: { + parent_object: string; + parent_record_id: string; + title: string; + content: string; + format?: 'plaintext' | 'markdown'; + }): Promise { + return this.makeRequest('/notes', { + method: 'POST', + body: JSON.stringify({ + data: { + parent_object: data.parent_object, + parent_record_id: data.parent_record_id, + title: data.title, + format: data.format || 'plaintext', + content: data.content + } + }), + }); + } + + async createPerson(data: { + name?: string; + email_addresses?: string[]; + phone_numbers?: string[]; + job_title?: string; + description?: string; + }): Promise { + const recordData: any = {}; + + if (data.name) { recordData.name = data.name; } + if (data.email_addresses) { recordData.email_addresses = data.email_addresses; } + if (data.phone_numbers) { + for (const phoneNumber of data.phone_numbers) { + recordData.phone_numbers.push({ original_phone_number: phoneNumber }); + } + } + if (data.job_title) { recordData.job_title = data.job_title; } + if (data.description) { recordData.description = data.description; } + + return this.makeRequest('/objects/people/records', { + method: 'POST', + body: JSON.stringify({ + data: { + values: recordData + } + }), + }); + } + + async createCompany(data: { + name?: string; + domains?: string[]; + description?: string; + }): Promise { + const recordData: any = {}; + + if (data.name) recordData.name = data.name; + if (data.domains) recordData.domains = data.domains; + if (data.description) recordData.description = data.description; + + return this.makeRequest('/objects/companies/records', { + method: 'POST', + body: JSON.stringify({ + data: { + values: recordData + } + }), + }); + } + + async updatePerson(recordId: string, data: { + name?: string; + email_addresses?: string[]; + phone_numbers?: string[]; + job_title?: string; + description?: string; + company_id?: string; + }): Promise { + const recordData: any = {}; + + if (data.name) { recordData.name = data.name; } + if (data.email_addresses) { recordData.email_addresses = data.email_addresses; } + if (data.phone_numbers) { + for (const phoneNumber of data.phone_numbers) { + recordData.phone_numbers.push({ original_phone_number: phoneNumber }); + } + } + if (data.job_title) { recordData.job_title = data.job_title; } + if (data.description) { recordData.description = data.description; } + + return this.makeRequest(`/objects/people/records/${recordId}`, { + method: 'PATCH', + body: JSON.stringify({ + data: { + values: recordData + } + }), + }); + } + + async updateCompany(recordId: string, data: { + name?: string; + domains?: string[]; + description?: string; + }): Promise { + const recordData: any = {}; + + if (data.name) recordData.name = data.name; + if (data.domains) recordData.domains = data.domains; + if (data.description) recordData.description = data.description; + + return this.makeRequest(`/objects/companies/records/${recordId}`, { + method: 'PATCH', + body: JSON.stringify({ + data: { + values: recordData + } + }), + }); + } + + async createTask(data: { + content_plaintext: string; + deadline_at?: string; + assignee_emails: string[]; + }): Promise { + const taskData: any = { + content: data.content_plaintext, + format: 'plaintext', + is_completed: false, + }; + + if (data.deadline_at) { + taskData.deadline_at = data.deadline_at; + } + + taskData.assignees = data.assignee_emails.map(email => ({ + workspace_member_email_address: email + })); + + // Add linked_records for each assignee email to connect to people records + taskData.linked_records = data.assignee_emails.map(email => ({ + target_object: "people", + email_addresses: [ + { + email_address: email + } + ] + })); + + console.log(JSON.stringify({ + data: taskData + })); + return this.makeRequest('/tasks', { + method: 'POST', + body: JSON.stringify({ + data: taskData + }), + }); + } +} + +// Getter function for the client +function getAttioClient() { + const store = asyncLocalStorage.getStore(); + if (!store) { + throw new Error('Attio client not found in AsyncLocalStorage'); + } + return store.attioClient; +} + +// Tool definitions +const SEARCH_PEOPLE_TOOL: Tool = { + name: 'attio_search_people', + description: 'Search for people in your Attio workspace with advanced filtering options. If no parameter other than limit is provided, it will search all people.', + inputSchema: { + type: 'object', + properties: { + query: { + type: 'string', + description: 'Search query for people (searches across name, email, company, job title, description, etc.)', + }, + email: { + type: 'string', + description: 'Filter by email address', + }, + record_id: { + type: 'string', + description: 'Filter by specific person record ID', + }, + limit: { + type: 'number', + description: 'Maximum number of results to return (default: 25, max: 50)', + default: 25, + }, + }, + }, + annotations: { + category: 'ATTIO_PERSON', + readOnlyHint: true + }, +}; + +const SEARCH_COMPANIES_TOOL: Tool = { + name: 'attio_search_companies', + description: 'Search for companies in your Attio workspace with filtering and sorting. If no parameter other than limit is provided, it will search all companies.', + inputSchema: { + type: 'object', + properties: { + query: { + type: 'string', + description: 'Search query for companies (searches across name, domain, description, employees names, employees descriptions, etc.)', + }, + domain: { + type: 'string', + description: 'Filter by company domain', + }, + record_id: { + type: 'string', + description: 'Filter by specific company record ID', + }, + limit: { + type: 'number', + description: 'Maximum number of results to return (default: 25, max: 50)', + default: 25, + }, + }, + }, + annotations: { + category: 'ATTIO_COMPANY', + readOnlyHint: true + }, +}; + +const SEARCH_DEALS_TOOL: Tool = { + name: 'attio_search_deals', + description: 'Search for deals in your Attio workspace with stage and value filtering. If no parameter other than limit is provided, it will search all deals.', + inputSchema: { + type: 'object', + properties: { + name: { + type: 'string', + description: 'Filter by deal name', + }, + stage: { + type: 'string', + description: 'Filter by deal stage (one of "Lead", "In Progress", "Won šŸŽ‰", "Lost")', + }, + minValue: { + type: 'number', + description: 'Minimum deal value', + }, + maxValue: { + type: 'number', + description: 'Maximum deal value', + }, + record_id: { + type: 'string', + description: 'Filter by specific deal record ID', + }, + limit: { + type: 'number', + description: 'Maximum number of results to return (default: 25, max: 50)', + default: 25, + }, + }, + }, + annotations: { + category: 'ATTIO_DEAL', + readOnlyHint: true + }, +}; + +const SEARCH_NOTES_TOOL: Tool = { + name: 'attio_search_notes', + description: 'Search for notes across all objects in your Attio workspace by fetching all notes and filtering by content.', + inputSchema: { + type: 'object', + properties: { + query: { + type: 'string', + description: 'Search query for notes content (searches title, plaintext content, and markdown content). Leave empty to get all notes.', + }, + limit: { + type: 'number', + description: 'Maximum number of notes to fetch and search through (default: 50, max: 50)', + default: 50, + }, + }, + }, + annotations: { + category: 'ATTIO_NOTE', + readOnlyHint: true + }, +}; + +const CREATE_NOTE_TOOL: Tool = { + name: 'attio_create_note', + description: 'Create a new note for a given record in Attio.', + inputSchema: { + type: 'object', + properties: { + parent_object: { + type: 'string', + description: 'The object type to attach the note to (e.g., "people", "companies", "deals")', + enum: ['people', 'companies', 'deals'] + }, + parent_record_id: { + type: 'string', + description: 'The ID of the record to attach the note to' + }, + title: { + type: 'string', + description: 'Title of the note' + }, + content: { + type: 'string', + description: 'Content of the note' + }, + format: { + type: 'string', + description: 'Format of the note content', + enum: ['plaintext', 'markdown'], + default: 'plaintext' + } + }, + required: ['parent_object', 'parent_record_id', 'title', 'content'], + }, + annotations: { + category: 'ATTIO_NOTE' + }, +}; + +const CREATE_PERSON_TOOL: Tool = { + name: 'attio_create_person', + description: 'Create a new person record in your Attio workspace.', + inputSchema: { + type: 'object', + properties: { + name: { + type: 'string', + description: 'Full name of the person', + }, + email_addresses: { + type: 'array', + items: { type: 'string' }, + description: 'Array of email addresses for the person', + }, + phone_numbers: { + type: 'array', + items: { type: 'string' }, + description: 'Array of phone numbers for the person', + }, + job_title: { + type: 'string', + description: 'Job title of the person', + }, + description: { + type: 'string', + description: 'Description or notes about the person', + }, + }, + }, + annotations: { + category: 'ATTIO_PERSON' + }, +}; + +const CREATE_COMPANY_TOOL: Tool = { + name: 'attio_create_company', + description: 'Create a new company record in your Attio workspace.', + inputSchema: { + type: 'object', + properties: { + name: { + type: 'string', + description: 'Name of the company', + }, + domains: { + type: 'array', + items: { type: 'string' }, + description: 'Array of domain names associated with the company', + }, + description: { + type: 'string', + description: 'Description of the company', + }, + }, + }, + annotations: { + category: 'ATTIO_COMPANY' + }, +}; + +const UPDATE_PERSON_TOOL: Tool = { + name: 'attio_update_person', + description: 'Update an existing person record in your Attio workspace.', + inputSchema: { + type: 'object', + properties: { + record_id: { + type: 'string', + description: 'ID of the person record to update', + }, + name: { + type: 'string', + description: 'Full name of the person', + }, + email_addresses: { + type: 'array', + items: { type: 'string' }, + description: 'Array of email addresses for the person', + }, + phone_numbers: { + type: 'array', + items: { type: 'string' }, + description: 'Array of phone numbers for the person', + }, + job_title: { + type: 'string', + description: 'Job title of the person', + }, + description: { + type: 'string', + description: 'Description or notes about the person', + }, + }, + required: ['record_id'], + }, + annotations: { + category: 'ATTIO_PERSON' + }, +}; + +const UPDATE_COMPANY_TOOL: Tool = { + name: 'attio_update_company', + description: 'Update an existing company record in your Attio workspace.', + inputSchema: { + type: 'object', + properties: { + record_id: { + type: 'string', + description: 'ID of the company record to update', + }, + name: { + type: 'string', + description: 'Name of the company', + }, + domains: { + type: 'array', + items: { type: 'string' }, + description: 'Array of domain names associated with the company', + }, + description: { + type: 'string', + description: 'Description of the company', + }, + }, + required: ['record_id'], + }, + annotations: { + category: 'ATTIO_COMPANY' + }, +}; + +const CREATE_TASK_TOOL: Tool = { + name: 'attio_create_task', + description: 'Create a new task in your Attio workspace. Tasks must be assigned to workspace members and can have optional deadlines.', + inputSchema: { + type: 'object', + properties: { + content_plaintext: { + type: 'string', + description: 'The plaintext content of the task (e.g., "Follow up on current software solutions")', + }, + deadline_at: { + type: 'string', + description: 'Optional deadline for the task as an ISO 8601 timestamp (e.g., "2023-01-01T15:00:00.000000000Z")', + }, + assignee_emails: { + type: 'array', + items: { type: 'string' }, + description: 'Array of workspace member email addresses to assign the task to (e.g., ["alice@attio.com", "bob@attio.com"])', + }, + }, + required: ['content_plaintext', 'assignee_emails'], + }, + annotations: { + category: 'ATTIO_TASK' + }, +}; + +// Utility functions +function safeLog(level: 'error' | 'debug' | 'info' | 'notice' | 'warning' | 'critical' | 'alert' | 'emergency', data: any): void { + try { + const logData = typeof data === 'object' ? JSON.stringify(data, null, 2) : data; + console.log(`[${level.toUpperCase()}] ${logData}`); + } catch (error) { + console.log(`[${level.toUpperCase()}] [LOG_ERROR] Could not serialize log data`); + } +} + +function extractAccessToken(req: Request): string { + let authData = process.env.AUTH_DATA; + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: Attio access token is missing. Provide it via ACCESS_TOKEN env var or x-auth-data header with access_token field.'); + return ''; + } + + const authDataJson = JSON.parse(authData); + return authDataJson.access_token ?? ''; +} + +// Main server function +const getAttioMcpServer = () => { + const server = new Server( + { + name: 'attio-mcp-server', + version: '1.0.0', + }, + { + capabilities: { + tools: {}, + }, + } + ); + + server.setRequestHandler(ListToolsRequestSchema, async () => { + return { + tools: [ + SEARCH_PEOPLE_TOOL, + SEARCH_COMPANIES_TOOL, + SEARCH_DEALS_TOOL, + SEARCH_NOTES_TOOL, + CREATE_NOTE_TOOL, + CREATE_PERSON_TOOL, + CREATE_COMPANY_TOOL, + UPDATE_PERSON_TOOL, + UPDATE_COMPANY_TOOL, + CREATE_TASK_TOOL, + ], + }; + }); + + server.setRequestHandler(CallToolRequestSchema, async (request) => { + const { name, arguments: args } = request.params; + + try { + switch (name) { + case 'attio_search_people': { + const client = getAttioClient(); + const filters: any = {}; + + if (args?.record_id) { + filters.record_id = { $eq: args.record_id }; + } else { + if (args?.query) { + filters.$or = [ + { name: { $contains: args.query } }, + { email_addresses: { $contains: args.query } }, + { description: { $contains: args.query } }, + { job_title: { $contains: args.query } }, + { + path: [ + ["people", "company"], + ["companies", "name"] + ], + constraints: { + $contains: args.query + } + }, + { + path: [ + ["people", "company"], + ["companies", "description"] + ], + constraints: { + $contains: args.query + } + }, + { primary_location: { locality: { $contains: args.query } } }, + ]; + } + if (args?.email) { + filters.email_addresses = args.email; + } + } + + const result = await client.searchPeople(filters, (args?.limit as number) || 25); + + return { + content: [ + { + type: 'text', + text: JSON.stringify(result, null, 2), + }, + ], + }; + } + + case 'attio_search_companies': { + const client = getAttioClient(); + const filters: any = {}; + + if (args?.record_id) { + filters.record_id = { $eq: args.record_id }; + } else { + if (args?.query) { + filters.$or = [ + { name: { $contains: args.query } }, + { domains: { domain: { $contains: args.query } } }, + { description: { $contains: args.query } }, + { primary_location: { locality: { $contains: args.query } } }, + { + path: [ + ["companies", "team"], + ["people", "name"] + ], + constraints: { + $contains: args.query + } + }, + { + path: [ + ["companies", "team"], + ["people", "description"] + ], + constraints: { + $contains: args.query + } + }, + ]; + } + if (args?.domain) { + filters.domains = { domain: args.domain }; + } + } + + const result = await client.searchCompanies(filters, (args?.limit as number) || 25); + + return { + content: [ + { + type: 'text', + text: JSON.stringify(result, null, 2), + }, + ], + }; + } + + case 'attio_search_deals': { + const client = getAttioClient(); + const filters: any = {}; + + if (args?.record_id) { + filters.record_id = { $eq: args.record_id }; + } else { + if (args?.name) { + filters.name = { $contains: args.name }; + } + if (args?.stage) { + filters.stage = args.stage; + } + if (args?.minValue !== undefined || args?.maxValue !== undefined) { + filters.value = {}; + if (args?.minValue !== undefined) { + filters.value.$gte = args.minValue; + } + if (args?.maxValue !== undefined) { + filters.value.$lte = args.maxValue; + } + } + } + + const result = await client.searchDeals(filters, (args?.limit as number) || 25); + + return { + content: [ + { + type: 'text', + text: JSON.stringify(result, null, 2), + }, + ], + }; + } + + case 'attio_search_notes': { + const client = getAttioClient(); + const result = await client.searchNotes((args as any)?.query || '', (args?.limit as number) || 50); + + return { + content: [ + { + type: 'text', + text: JSON.stringify(result, null, 2), + }, + ], + }; + } + + case 'attio_create_note': { + const client = getAttioClient(); + + const result = await client.createNote({ + parent_object: (args as any)?.parent_object, + parent_record_id: (args as any)?.parent_record_id, + title: (args as any)?.title, + content: (args as any)?.content, + format: (args as any)?.format || 'plaintext' + }); + + return { + content: [ + { + type: 'text', + text: JSON.stringify(result, null, 2), + }, + ], + }; + } + + case 'attio_create_person': { + const client = getAttioClient(); + + const result = await client.createPerson({ + name: (args as any)?.name, + email_addresses: (args as any)?.email_addresses, + phone_numbers: (args as any)?.phone_numbers, + job_title: (args as any)?.job_title, + description: (args as any)?.description, + }); + + return { + content: [ + { + type: 'text', + text: JSON.stringify(result, null, 2), + }, + ], + }; + } + + case 'attio_create_company': { + const client = getAttioClient(); + + const result = await client.createCompany({ + name: (args as any)?.name, + domains: (args as any)?.domains, + description: (args as any)?.description, + }); + + return { + content: [ + { + type: 'text', + text: JSON.stringify(result, null, 2), + }, + ], + }; + } + + case 'attio_update_person': { + const client = getAttioClient(); + + const result = await client.updatePerson((args as any)?.record_id, { + name: (args as any)?.name, + email_addresses: (args as any)?.email_addresses, + phone_numbers: (args as any)?.phone_numbers, + job_title: (args as any)?.job_title, + description: (args as any)?.description, + }); + + return { + content: [ + { + type: 'text', + text: JSON.stringify(result, null, 2), + }, + ], + }; + } + + case 'attio_update_company': { + const client = getAttioClient(); + + const result = await client.updateCompany((args as any)?.record_id, { + name: (args as any)?.name, + domains: (args as any)?.domains, + description: (args as any)?.description, + }); + + return { + content: [ + { + type: 'text', + text: JSON.stringify(result, null, 2), + }, + ], + }; + } + + case 'attio_create_task': { + const client = getAttioClient(); + + const result = await client.createTask({ + content_plaintext: (args as any)?.content_plaintext, + deadline_at: (args as any)?.deadline_at, + assignee_emails: (args as any)?.assignee_emails, + }); + + return { + content: [ + { + type: 'text', + text: JSON.stringify(result, null, 2), + }, + ], + }; + } + + default: + throw new Error(`Unknown tool: ${name}`); + } + } catch (error: any) { + safeLog('error', `Tool ${name} failed: ${error.message}`); + return { + content: [ + { + type: 'text', + text: `Error: ${error.message}`, + }, + ], + isError: true, + }; + } + }); + + return server; +}; + +const app = express(); + + +//============================================================================= +// STREAMABLE HTTP TRANSPORT (PROTOCOL VERSION 2025-03-26) +//============================================================================= + +app.post('/mcp', async (req: Request, res: Response) => { + const accessToken = extractAccessToken(req); + const attioClient = new AttioClient(accessToken); + + const server = getAttioMcpServer(); + try { + const transport: StreamableHTTPServerTransport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + await server.connect(transport); + asyncLocalStorage.run({ attioClient }, async () => { + await transport.handleRequest(req, res, req.body); + }); + res.on('close', () => { + console.log('Request closed'); + transport.close(); + server.close(); + }); + } catch (error) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', + }, + id: null, + }); + } + } +}); + +app.get('/mcp', async (req: Request, res: Response) => { + console.log('Received GET MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +app.delete('/mcp', async (req: Request, res: Response) => { + console.log('Received DELETE MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +//============================================================================= +// DEPRECATED HTTP+SSE TRANSPORT (PROTOCOL VERSION 2024-11-05) +//============================================================================= + +// to support multiple simultaneous connections we have a lookup object from +// sessionId to transport +const transports = new Map(); + +app.get("/sse", async (req, res) => { + const transport = new SSEServerTransport(`/messages`, res); + + // Set up cleanup when connection closes + res.on('close', async () => { + console.log(`SSE connection closed for transport: ${transport.sessionId}`); + try { + transports.delete(transport.sessionId); + } finally { + } + }); + + transports.set(transport.sessionId, transport); + + const server = getAttioMcpServer(); + await server.connect(transport); + + console.log(`SSE connection established with transport: ${transport.sessionId}`); +}); + +app.post("/messages", async (req, res) => { + const sessionId = req.query.sessionId as string; + const transport = transports.get(sessionId); + if (transport) { + const accessToken = extractAccessToken(req); + const attioClient = new AttioClient(accessToken); + + asyncLocalStorage.run({ attioClient }, async () => { + await transport.handlePostMessage(req, res); + }); + } else { + console.error(`Transport not found for session ID: ${sessionId}`); + res.status(404).send({ error: "Transport not found" }); + } +}); + +app.listen(5000, () => { + console.log('server running on port 5000'); +}); \ No newline at end of file diff --git a/mcp_servers/attio/package-lock.json b/mcp_servers/attio/package-lock.json new file mode 100644 index 00000000..1e6b665a --- /dev/null +++ b/mcp_servers/attio/package-lock.json @@ -0,0 +1,6296 @@ +{ + "name": "@klavis-ai/mcp-server-attio", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@klavis-ai/mcp-server-attio", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.12.1", + "dotenv": "^16.4.7", + "express": "^5.1.0", + "p-queue": "^8.0.1", + "shx": "^0.3.4", + "ws": "^8.18.1" + }, + "bin": { + "attio-mcp": "dist/index.js" + }, + "devDependencies": { + "@jest/globals": "^29.7.0", + "@types/express": "^5.0.1", + "@types/jest": "^29.5.14", + "@types/node": "^20.10.5", + "@typescript-eslint/eslint-plugin": "^7.0.0", + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "jest": "^29.7.0", + "jest-mock-extended": "^4.0.0-beta1", + "prettier": "^3.1.1", + "ts-jest": "^29.1.1", + "typescript": "^5.3.3" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.26.8", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.8.tgz", + "integrity": "sha512-oH5UPLMWR3L2wEFLnFJ1TZXqHufiTKAiLfqw5zkhS4dKXLJ10yVztfil/twG8EDTA4F/tvVNw9nOl4ZMslB8rQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.10.tgz", + "integrity": "sha512-vMqyb7XCDMPvJFFOaT9kxtiRh42GwlZEg1/uIgtZshS5a/8OaduUfCi7kynKgc3Tw/6Uo2D+db9qBttghhmxwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.10", + "@babel/helper-compilation-targets": "^7.26.5", + "@babel/helper-module-transforms": "^7.26.0", + "@babel/helpers": "^7.26.10", + "@babel/parser": "^7.26.10", + "@babel/template": "^7.26.9", + "@babel/traverse": "^7.26.10", + "@babel/types": "^7.26.10", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.27.0.tgz", + "integrity": "sha512-VybsKvpiN1gU1sdMZIp7FcqphVVKEwcuj02x73uvcHE0PTihx1nlBcowYWhDwjpoAXRv43+gDzyggGnn1XZhVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.27.0", + "@babel/types": "^7.27.0", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.0.tgz", + "integrity": "sha512-LVk7fbXml0H2xH34dFzKQ7TDZ2G4/rVTOrq9V+icbbadjbVxxeFeDsNHv2SrZeWoA+6ZiTyWYWtScEIW07EAcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.26.8", + "@babel/helper-validator-option": "^7.25.9", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", + "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", + "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9", + "@babel/traverse": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.26.5", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.26.5.tgz", + "integrity": "sha512-RS+jZcRdZdRFzMyr+wcsaqOmld1/EqTghfaBGQQd/WnRdzdlvSZ//kF7U8VQTxf1ynZ4cjUcYgjVGx13ewNPMg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", + "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.0.tgz", + "integrity": "sha512-U5eyP/CTFPuNE3qk+WZMxFkp/4zUzdceQlfzf7DdGdhp+Fezd7HD+i8Y24ZuTMKX3wQBld449jijbGq6OdGNQg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.0", + "@babel/types": "^7.27.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.0.tgz", + "integrity": "sha512-iaepho73/2Pz7w2eMS0Q5f83+0RKI7i4xmiYeBmDzfRVbQtTOG7Ts0S4HzJVsTMGI9keU8rNfuZr8DKfSt7Yyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.26.0.tgz", + "integrity": "sha512-e2dttdsJ1ZTpi3B9UYGLw41hifAubg19AtCu/2I/F1QNVclOBr1dYpTdmdyZ84Xiz43BS/tCUkMAZNLv12Pi+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.9.tgz", + "integrity": "sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.9.tgz", + "integrity": "sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.0.tgz", + "integrity": "sha512-2ncevenBqXI6qRMukPlXwHKHchC7RyMuu4xv5JBXRfOGVcTy1mXCD12qrp7Jsoxll1EV3+9sE4GugBVRjT2jFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.26.2", + "@babel/parser": "^7.27.0", + "@babel/types": "^7.27.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.0.tgz", + "integrity": "sha512-19lYZFzYVQkkHkl4Cy4WrAVcqBkgvV2YM2TU3xG6DIwO7O3ecbDPfW3yM3bjAGcqcQHi+CCtjMR3dIEHxsd6bA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.27.0", + "@babel/parser": "^7.27.0", + "@babel/template": "^7.27.0", + "@babel/types": "^7.27.0", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse/node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/types": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.0.tgz", + "integrity": "sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.5.1.tgz", + "integrity": "sha512-soEIOALTfTK6EjmKMMoLugwaP0rzkad90iIWd1hMO9ARkSAyjfMfkRRhLvD5qH7vvM0Cg72pieUfR6yh6XxC4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.3", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", + "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/core": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", + "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/reporters": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^29.7.0", + "jest-config": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-resolve-dependencies": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "jest-watcher": "^29.7.0", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/environment": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", + "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^29.7.0", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", + "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-get-type": "^29.6.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", + "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@sinonjs/fake-timers": "^10.0.2", + "@types/node": "*", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", + "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/types": "^29.6.3", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", + "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^6.0.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "slash": "^3.0.0", + "string-length": "^4.0.1", + "strip-ansi": "^6.0.0", + "v8-to-istanbul": "^9.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/source-map": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", + "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.18", + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", + "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-sequencer": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", + "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", + "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", + "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", + "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.12.3", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.12.3.tgz", + "integrity": "sha512-DyVYSOafBvk3/j1Oka4z5BWT8o4AFmoNyZY9pALOm7Lh3GZglR71Co4r4dEUoqDWdDazIZQHBe7J2Nwkg6gHgQ==", + "license": "MIT", + "dependencies": { + "ajv": "^6.12.6", + "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "express": "^5.0.1", + "express-rate-limit": "^7.5.0", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.23.8", + "zod-to-json-schema": "^3.24.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", + "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", + "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^3.0.0" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.7", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.7.tgz", + "integrity": "sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.20.7" + } + }, + "node_modules/@types/body-parser": { + "version": "1.19.5", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", + "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/express": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.1.tgz", + "integrity": "sha512-UZUw8vjpWFXuDnjFTh7/5c2TWDlQqeXHi6hcN7F2XSVT5P+WmUnnbFS3KA6Jnc6IsEqI2qCVu2bK0R0J4A8ZQQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^5.0.0", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.6.tgz", + "integrity": "sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", + "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", + "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "29.5.14", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", + "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "20.17.30", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.30.tgz", + "integrity": "sha512-7zf4YyHA+jvBNfVrk2Gtvs6x7E8V+YDW05bNfG2XkWDJfYRXrTiP/DsB2zSYTaHX0bGIujTBQdMVAhb+j7mwpg==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/@types/qs": { + "version": "6.9.18", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.18.tgz", + "integrity": "sha512-kK7dgTYDyGqS+e2Q4aK9X3D7q234CIZ1Bv0q/7Z5IwRDoADNU81xXJK/YVyLbLTZCoIwUoDoffFeF+p/eIklAA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/send": { + "version": "0.17.4", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", + "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.7", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", + "integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/@types/stack-utils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/yargs": { + "version": "17.0.33", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.18.0.tgz", + "integrity": "sha512-94EQTWZ40mzBc42ATNIBimBEDltSJ9RQHCC8vc/PDbxi4k8dVwUAv4o98dk50M1zB+JGFxp43FP7f8+FP8R6Sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/type-utils": "7.18.0", + "@typescript-eslint/utils": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "graphemer": "^1.4.0", + "ignore": "^5.3.1", + "natural-compare": "^1.4.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.18.0.tgz", + "integrity": "sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/typescript-estree": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.18.0.tgz", + "integrity": "sha512-jjhdIE/FPF2B7Z1uzc6i3oWKbGcHb87Qw7AWj6jmEqNOfDFbJWtjt/XfwCpvNkpGWlcJaog5vTR+VV8+w9JflA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.18.0.tgz", + "integrity": "sha512-XL0FJXuCLaDuX2sYqZUUSOJ2sG5/i1AAze+axqmLnSkNEVMVYLF+cbwlB2w8D1tinFuSikHmFta+P+HOofrLeA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/typescript-estree": "7.18.0", + "@typescript-eslint/utils": "7.18.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.18.0.tgz", + "integrity": "sha512-iZqi+Ds1y4EDYUtlOOC+aUmxnE9xS/yCigkjA7XpTKV6nCBd3Hp/PRGGmdwnfkV2ThMyYldP1wRpm/id99spTQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.18.0.tgz", + "integrity": "sha512-aP1v/BSPnnyhMHts8cf1qQ6Q1IFwwRvAQGRvBFkWlo3/lH29OXA3Pts+c10nxRxIBrDnoMqzhgdwVe5f2D6OzA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.18.0.tgz", + "integrity": "sha512-kK0/rNa2j74XuHVcoCZxdFBMF+aq/vH83CXAOHieC+2Gis4mF8jJXT5eAfyD3K0sAxtPuwxaIOIOvhwzVDt/kw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/typescript-estree": "7.18.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.18.0.tgz", + "integrity": "sha512-cDF0/Gf81QpY3xYyJKDV14Zwdmid5+uuENhjH2EqFaF0ni+yAyq/LzMaIJdhNJXZI7uLzwIlA+V7oWoyn6Curg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true, + "license": "ISC" + }, + "node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/acorn": { + "version": "8.14.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", + "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-escapes/node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", + "dev": true, + "license": "MIT" + }, + "node_modules/babel-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", + "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/transform": "^29.7.0", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^29.6.3", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", + "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.1.14", + "@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz", + "integrity": "sha512-ldYss8SbBlWva1bs28q78Ju5Zq1F+8BrqBZZ0VFhLBvhh6lCpC2o3gDJi/5DRLs9FgYZCnmPYIVFU4lRXCkyUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/babel-preset-jest": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", + "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-jest-hoist": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, + "node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.24.4", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz", + "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "caniuse-lite": "^1.0.30001688", + "electron-to-chromium": "^1.5.73", + "node-releases": "^2.0.19", + "update-browserslist-db": "^1.1.1" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-json-stable-stringify": "2.x" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001712", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001712.tgz", + "integrity": "sha512-MBqPpGYYdQ7/hfKiet9SCI+nmN5/hp4ZzveOJubl5DTAMa5oggjAuoi0Z4onBpKPFI2ePGnQuQIzF3VxDjDJig==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", + "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz", + "integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" + }, + "node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/create-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", + "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "prompts": "^2.0.1" + }, + "bin": { + "create-jest": "bin/create-jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/dedent": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.5.3.tgz", + "integrity": "sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dotenv": { + "version": "16.4.7", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz", + "integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/ejs": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", + "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.132", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.132.tgz", + "integrity": "sha512-QgX9EBvWGmvSRa74zqfnG7+Eno0Ak0vftBll0Pt2/z5b3bEGYL6OUXLgKPtvx73dn3dvwrlyVkjPKRRlhLYTEg==", + "dev": true, + "license": "ISC" + }, + "node_modules/emittery": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", + "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-config-prettier": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", + "dev": true, + "license": "MIT", + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventemitter3": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", + "license": "MIT" + }, + "node_modules/eventsource": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.6.tgz", + "integrity": "sha512-l19WpE2m9hSuyP06+FbuUUf1G+R0SFLrtQfbRb9PRr+oimOfxQhgGCbVaXg5IvZyyTThJsxh6L/srkMiCeBPDA==", + "license": "MIT", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.1.tgz", + "integrity": "sha512-VARTJ9CYeuQYb0pZEPbzi740OWFgpHe7AYJ2WFZVnUDUQp5Dk2yJUgF36YsZ81cOyxT0QxmXD2EQpapAouzWVA==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/expect-utils": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/express": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "license": "MIT", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.0", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.0.tgz", + "integrity": "sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg==", + "license": "MIT", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": "^4.11 || 5 || ^5.0.0-beta.1" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/filelist": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "minimatch": "^5.0.1" + } + }, + "node_modules/filelist/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true, + "license": "MIT" + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true, + "license": "MIT" + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-local": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", + "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/interpret": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", + "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.23.9", + "@babel/parser": "^7.23.9", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jake": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz", + "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "async": "^3.2.3", + "chalk": "^4.0.2", + "filelist": "^1.0.4", + "minimatch": "^3.1.2" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jake/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/jake/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", + "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/types": "^29.6.3", + "import-local": "^3.0.2", + "jest-cli": "^29.7.0" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", + "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "execa": "^5.0.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-circus": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", + "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^1.0.0", + "is-generator-fn": "^2.0.0", + "jest-each": "^29.7.0", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0", + "pretty-format": "^29.7.0", + "pure-rand": "^6.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-cli": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", + "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "create-jest": "^29.7.0", + "exit": "^0.1.2", + "import-local": "^3.0.2", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "yargs": "^17.3.1" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-config": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", + "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/test-sequencer": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-jest": "^29.7.0", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-circus": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-diff": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", + "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^29.6.3", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-docblock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", + "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "detect-newline": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-each": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", + "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "jest-util": "^29.7.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-environment-node": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", + "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", + "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-haste-map": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", + "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/graceful-fs": "^4.1.3", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + "node_modules/jest-leak-detector": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", + "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", + "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", + "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^29.6.3", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", + "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock-extended": { + "version": "4.0.0-beta1", + "resolved": "https://registry.npmjs.org/jest-mock-extended/-/jest-mock-extended-4.0.0-beta1.tgz", + "integrity": "sha512-MYcI0wQu3ceNhqKoqAJOdEfsVMamAFqDTjoLN5Y45PAG3iIm4WGnhOu0wpMjlWCexVPO71PMoNir9QrGXrnIlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "ts-essentials": "^10.0.2" + }, + "peerDependencies": { + "@jest/globals": "^28.0.0 || ^29.0.0", + "jest": "^24.0.0 || ^25.0.0 || ^26.0.0 || ^27.0.0 || ^28.0.0 || ^29.0.0", + "typescript": "^3.0.0 || ^4.0.0 || ^5.0.0" + } + }, + "node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", + "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", + "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "resolve": "^1.20.0", + "resolve.exports": "^2.0.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve-dependencies": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", + "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-regex-util": "^29.6.3", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runner": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", + "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/environment": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-leak-detector": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-resolve": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-util": "^29.7.0", + "jest-watcher": "^29.7.0", + "jest-worker": "^29.7.0", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runtime": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", + "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/globals": "^29.7.0", + "@jest/source-map": "^29.6.3", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", + "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-jsx": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/types": "^7.3.3", + "@jest/expect-utils": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "natural-compare": "^1.4.0", + "pretty-format": "^29.7.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", + "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", + "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "leven": "^3.1.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-watcher": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", + "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "jest-util": "^29.7.0", + "string-length": "^4.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", + "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "jest-util": "^29.7.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "tmpl": "1.0.5" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-releases": { + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", + "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-queue": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-8.1.0.tgz", + "integrity": "sha512-mxLDbbGIBEXTJL0zEx8JIylaj3xQ7Z/7eEVjcF9fJX4DBiH9oqe+oahYnlKKxm0Ci9TlWTyhSHgygxMxjIB2jw==", + "license": "MIT", + "dependencies": { + "eventemitter3": "^5.0.1", + "p-timeout": "^6.1.2" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-timeout": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-6.1.4.tgz", + "integrity": "sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg==", + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "license": "MIT" + }, + "node_modules/path-to-regexp": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "license": "MIT", + "engines": { + "node": ">=16" + } + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkce-challenge": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", + "license": "MIT", + "engines": { + "node": ">=16.20.0" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", + "integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/pure-rand": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", + "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, + "node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", + "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.6.3", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true, + "license": "MIT" + }, + "node_modules/rechoir": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", + "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==", + "dependencies": { + "resolve": "^1.1.6" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-cwd/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve.exports": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz", + "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + "path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/serve-static": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shelljs": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", + "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", + "license": "BSD-3-Clause", + "dependencies": { + "glob": "^7.0.0", + "interpret": "^1.0.0", + "rechoir": "^0.6.2" + }, + "bin": { + "shjs": "bin/shjs" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/shx": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/shx/-/shx-0.3.4.tgz", + "integrity": "sha512-N6A9MLVqjxZYcVn8hLmtneQWIJtp8IKzMP4eMnx+nqkvXoqinUPCbUFLp2UcWTEIUONhlk0ewxr/jaVGlc+J+g==", + "license": "MIT", + "dependencies": { + "minimist": "^1.2.3", + "shelljs": "^0.8.5" + }, + "bin": { + "shx": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true, + "license": "MIT" + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/stack-utils/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/test-exclude/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/test-exclude/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true, + "license": "MIT" + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/ts-api-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.4.3.tgz", + "integrity": "sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, + "node_modules/ts-essentials": { + "version": "10.0.4", + "resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-10.0.4.tgz", + "integrity": "sha512-lwYdz28+S4nicm+jFi6V58LaAIpxzhg9rLdgNC1VsdP/xiFBseGhF1M/shwCk6zMmwahBZdXcl34LVHrEang3A==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "typescript": ">=4.5.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/ts-jest": { + "version": "29.3.1", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.3.1.tgz", + "integrity": "sha512-FT2PIRtZABwl6+ZCry8IY7JZ3xMuppsEV9qFVHOVe8jDzggwUZ9TsM4chyJxL9yi6LvkqcZYU3LmapEE454zBQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "bs-logger": "^0.2.6", + "ejs": "^3.1.10", + "fast-json-stable-stringify": "^2.1.0", + "jest-util": "^29.0.0", + "json5": "^2.2.3", + "lodash.memoize": "^4.1.2", + "make-error": "^1.3.6", + "semver": "^7.7.1", + "type-fest": "^4.38.0", + "yargs-parser": "^21.1.1" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0" + }, + "peerDependencies": { + "@babel/core": ">=7.0.0-beta.0 <8", + "@jest/transform": "^29.0.0", + "@jest/types": "^29.0.0", + "babel-jest": "^29.0.0", + "jest": "^29.0.0", + "typescript": ">=4.3 <6" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@jest/transform": { + "optional": true + }, + "@jest/types": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + } + } + }, + "node_modules/ts-jest/node_modules/type-fest": { + "version": "4.39.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.39.1.tgz", + "integrity": "sha512-uW9qzd66uyHYxwyVBYiwS4Oi0qZyUqwjU+Oevr6ZogYiXt99EOYtwvzMSLw1c3lYo2HzJsep/NB23iEVEgjG/w==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true, + "license": "MIT" + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", + "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/v8-to-istanbul": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", + "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", + "dev": true, + "license": "ISC", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "makeerror": "1.0.12" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, + "node_modules/write-file-atomic": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/ws": { + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.1.tgz", + "integrity": "sha512-RKW2aJZMXeMxVpnZ6bck+RswznaxmzdULiBr6KY7XkTnW8uvt0iT9H5DkHUChXrc+uurzwa0rVI16n/Xzjdz1w==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zod": { + "version": "3.24.2", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.2.tgz", + "integrity": "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.24.5", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz", + "integrity": "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.24.1" + } + } + } +} diff --git a/mcp_servers/attio/package.json b/mcp_servers/attio/package.json new file mode 100644 index 00000000..e235bc8d --- /dev/null +++ b/mcp_servers/attio/package.json @@ -0,0 +1,60 @@ +{ + "name": "@klavis-ai/mcp-server-attio", + "version": "1.0.0", + "description": "MCP server for Attio CRM integration.", + "type": "module", + "bin": { + "attio-mcp": "dist/index.js" + }, + "files": [ + "dist" + ], + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "tsc && node -e \"require('fs').chmodSync('dist/index.js', '755')\"", + "test": "node --experimental-vm-modules node_modules/jest/bin/jest.js", + "start": "node dist/index.js", + "lint": "eslint src/**/*.ts", + "lint:fix": "eslint src/**/*.ts --fix", + "format": "prettier --write .", + "prepare": "npm run build", + "publish": "npm run build && npm publish" + }, + "license": "MIT", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.12.1", + "dotenv": "^16.4.7", + "p-queue": "^8.0.1", + "shx": "^0.3.4", + "ws": "^8.18.1", + "express": "^5.1.0" + }, + "devDependencies": { + "@jest/globals": "^29.7.0", + "@types/jest": "^29.5.14", + "@types/node": "^20.10.5", + "@types/express": "^5.0.1", + "@typescript-eslint/eslint-plugin": "^7.0.0", + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "jest": "^29.7.0", + "jest-mock-extended": "^4.0.0-beta1", + "prettier": "^3.1.1", + "ts-jest": "^29.1.1", + "typescript": "^5.3.3" + }, + "engines": { + "node": ">=18.0.0" + }, + "keywords": [ + "mcp", + "attio", + "crm", + "customer-relationship-management", + "api-integration" + ], + "author": "Klavis AI" +} \ No newline at end of file diff --git a/mcp_servers/attio/tsconfig.json b/mcp_servers/attio/tsconfig.json new file mode 100644 index 00000000..b511d2b8 --- /dev/null +++ b/mcp_servers/attio/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "outDir": "./dist", + "rootDir": ".", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true + }, + "include": ["./**/*.ts"], + "exclude": [ + "node_modules", + "dist", + "tests" + ] +} \ No newline at end of file diff --git a/mcp_servers/brave_search/.env.example b/mcp_servers/brave_search/.env.example new file mode 100644 index 00000000..ca1ee4f3 --- /dev/null +++ b/mcp_servers/brave_search/.env.example @@ -0,0 +1,2 @@ +BRAVE_SEARCH_API_KEY='' +BRAVE_SEARCH_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/brave_search/Dockerfile b/mcp_servers/brave_search/Dockerfile new file mode 100644 index 00000000..7c994ada --- /dev/null +++ b/mcp_servers/brave_search/Dockerfile @@ -0,0 +1,24 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies (e.g., gcc for some Python deps) +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only requirements file first to leverage build cache +COPY mcp_servers/brave_search/requirements.txt . + +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the application code +COPY mcp_servers/brave_search/server.py . +COPY mcp_servers/brave_search/tools/ ./tools/ + +# Expose the port used by the brave_search MCP server (change if needed) +EXPOSE 5000 + +# Default command to run the server +CMD ["python", "server.py"] diff --git a/mcp_servers/brave_search/README.md b/mcp_servers/brave_search/README.md new file mode 100644 index 00000000..b12a2c4f --- /dev/null +++ b/mcp_servers/brave_search/README.md @@ -0,0 +1,73 @@ +# Brave Search MCP Server + +A Model Context Protocol (MCP) server for Brave Search integration. Perform web searches, news searches, image searches, and video searches using Brave's Search API. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Brave Search with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("BRAVE_SEARCH", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/brave-search-mcp-server:latest + + +# Run Brave Search MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/brave-search-mcp-server:latest +``` + +**API Key Setup:** Get your Brave Search API key from the [Brave Search API Dashboard](https://api.search.brave.com/). + +## šŸ› ļø Available Tools + +- **Web Search**: Comprehensive web search with ranking and snippets +- **News Search**: Search for recent news articles and updates +- **Image Search**: Find images with metadata and source information +- **Video Search**: Search for videos across platforms +- **Search Filters**: Apply various filters for refined results + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/brave_search/requirements.txt b/mcp_servers/brave_search/requirements.txt new file mode 100644 index 00000000..fef25453 --- /dev/null +++ b/mcp_servers/brave_search/requirements.txt @@ -0,0 +1 @@ +mcp==1.11.0 \ No newline at end of file diff --git a/mcp_servers/brave_search/server.py b/mcp_servers/brave_search/server.py new file mode 100644 index 00000000..fa24039c --- /dev/null +++ b/mcp_servers/brave_search/server.py @@ -0,0 +1,432 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import List + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( +auth_token_context, +brave_web_search, +brave_video_search, +brave_news_search, +brave_image_search +) + + + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +BRAVE_SEARCH_MCP_SERVER_PORT = int(os.getenv("BRAVE_SEARCH_MCP_SERVER_PORT", "5000")) + +def extract_api_key(request_or_scope) -> str: + """Extract API key from headers or environment.""" + api_key = os.getenv("API_KEY") + + if not api_key: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + else: + auth_data = None + + if auth_data: + try: + # Parse the JSON auth data to extract token + auth_json = json.loads(auth_data) + api_key = auth_json.get('token') or auth_json.get('api_key') or '' + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + api_key = "" + + return api_key or "" + + +@click.command() +@click.option("--port", default=BRAVE_SEARCH_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) + +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("brave-search-mcp-server") +#------------------------------------------------------------------- + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="brave_web_search", + description=""" + Perform a Brave web search. + + Typical use: get live web results by query, with optional pagination, country, language, and safesearch filters. + """, + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Required. The search query. Max 400 chars & 50 words." + }, + "count": { + "type": "integer", + "description": "Number of results to return (max 20, default 5)." + }, + "offset": { + "type": "integer", + "description": "Zero-based offset for pagination." + }, + "country": { + "type": "string", + "description": "2-letter country code to localize results, e.g., 'US'." + }, + "search_lang": { + "type": "string", + "description": "Language code for search results, e.g., 'en'." + }, + "safesearch": { + "type": "string", + "enum": ["off", "moderate", "strict"], + "description": "Filter adult content." + } + }, + "required": ["query"] + }, + annotations=types.ToolAnnotations( + **{"category": "BRAVE_SEARCH", "readOnlyHint": True} + ), + ), + + types.Tool( + name="brave_image_search", + description=""" + Perform a Brave image search by query. + + Supports safesearch filtering, language and country localization, and pagination. + """, + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "[Required] Search term for images. Max 400 chars & 50 words." + }, + "count": { + "type": "integer", + "description": "Number of image results to return (default: 5, max: 200)." + }, + "offset": { + "type": "integer", + "description": "Zero-based offset for pagination." + }, + "search_lang": { + "type": "string", + "description": "Language code for image results, e.g., 'en'." + }, + "country": { + "type": "string", + "description": "2-letter country code to localize results, e.g., 'US'." + }, + "safesearch": { + "type": "string", + "enum": ["off", "strict"], + "description": "Adult content filter: 'off' or 'strict'." + } + }, + "required": ["query"] + }, + annotations=types.ToolAnnotations( + **{"category": "BRAVE_SEARCH", "readOnlyHint": True} + ), + ), + types.Tool( + name="brave_news_search", + description=""" + Perform a Brave news search by query. + + Supports safesearch filtering, language and country localization, pagination, and freshness filter to get recent news. + """, + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "[Required] Search term for news articles. Max 400 chars & 50 words." + }, + "count": { + "type": "integer", + "description": "Number of news results to return (default: 5, max: 50)." + }, + "offset": { + "type": "integer", + "description": "Zero-based offset for pagination." + }, + "country": { + "type": "string", + "description": "2-letter country code to localize results, e.g., 'US'." + }, + "search_lang": { + "type": "string", + "description": "Language code for news results, e.g., 'en'." + }, + "safesearch": { + "type": "string", + "enum": ["off", "moderate", "strict"], + "description": "Adult content filter: 'off', 'moderate', or 'strict'." + }, + "freshness": { + "type": "string", + "description": "Filter by recency: 'pd' (24h), 'pw' (7d), 'pm' (31d), 'py' (year), or custom 'YYYY-MM-DDtoYYYY-MM-DD'." + } + }, + "required": ["query"] + }, + annotations=types.ToolAnnotations( + **{"category": "BRAVE_SEARCH", "readOnlyHint": True} + ), + ), + types.Tool( + name="brave_video_search", + description=""" + Perform a Brave video search by query. + Supports safesearch filtering, language and country localization, pagination, and freshness filter to get recent videos. + """, + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "[Required] Search term for videos. Max 400 chars & 50 words." + }, + "count": { + "type": "integer", + "description": "Number of video results to return (default: 5, max: 50)." + }, + "offset": { + "type": "integer", + "description": "Zero-based offset for pagination." + }, + "country": { + "type": "string", + "description": "2-letter country code to localize results, e.g., 'US'." + }, + "search_lang": { + "type": "string", + "description": "Language code for video results, e.g., 'en'." + }, + "safesearch": { + "type": "string", + "enum": ["off", "moderate", "strict"], + "description": "Adult content filter: 'off', 'moderate', or 'strict'." + }, + "freshness": { + "type": "string", + "description": "Filter by discovery date: 'pd' (24h), 'pw' (7d), 'pm' (31d), 'py' (year), or custom 'YYYY-MM-DDtoYYYY-MM-DD'." + } + }, + "required": ["query"] + }, + annotations=types.ToolAnnotations( + **{"category": "BRAVE_SEARCH", "readOnlyHint": True} + ), + ) + + ] + + @app.call_tool() + async def call_tool( + name: str, + arguments: dict + ) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + if name == "brave_web_search": + try: + result = await brave_web_search( + query=arguments["query"], + count=arguments.get("count"), + offset=arguments.get("offset"), + country=arguments.get("country"), + search_lang=arguments.get("search_lang"), + safesearch=arguments.get("safesearch") + ) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception(f"Error in brave_search: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + elif name == "brave_image_search": + try: + result = await brave_image_search( + query=arguments["query"], + count=arguments.get("count"), + offset=arguments.get("offset"), + search_lang=arguments.get("search_lang"), + country=arguments.get("country"), + safesearch=arguments.get("safesearch") + ) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception(f"Error in brave_image_search: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + elif name == "brave_news_search": + try: + result = await brave_news_search( + query=arguments["query"], + count=arguments.get("count"), + offset=arguments.get("offset"), + country=arguments.get("country"), + search_lang=arguments.get("search_lang"), + safesearch=arguments.get("safesearch"), + freshness=arguments.get("freshness") + ) + + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception(f"Error in brave_news_search: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + elif name == "brave_video_search": + try: + result = await brave_video_search( + query=arguments["query"], + count=arguments.get("count"), + offset=arguments.get("offset"), + country=arguments.get("country"), + search_lang=arguments.get("search_lang"), + safesearch=arguments.get("safesearch"), + freshness=arguments.get("freshness") + ) + + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception(f"Error in brave_video_search: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + #------------------------------------------------------------------------- + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract API key from headers + api_key = extract_api_key(request) + + # Set the API key in context for this request + token = auth_token_context.set(api_key) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract API key from headers + api_key = extract_api_key(scope) + + # Set the API key in context for this request + token = auth_token_context.set(api_key) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + + +if __name__ == "__main__": + main() diff --git a/mcp_servers/brave_search/tools/__init__.py b/mcp_servers/brave_search/tools/__init__.py new file mode 100644 index 00000000..bfcce71c --- /dev/null +++ b/mcp_servers/brave_search/tools/__init__.py @@ -0,0 +1,18 @@ +from .base import ( +auth_token_context +) + +from .search import ( +brave_web_search, +brave_image_search, +brave_news_search, +brave_video_search +) + +__all__ = [ + "auth_token_context", + "brave_web_search", + "brave_image_search", + "brave_news_search", + "brave_video_search" +] diff --git a/mcp_servers/brave_search/tools/base.py b/mcp_servers/brave_search/tools/base.py new file mode 100644 index 00000000..1c8fdaf0 --- /dev/null +++ b/mcp_servers/brave_search/tools/base.py @@ -0,0 +1,48 @@ +import logging +import os +from contextvars import ContextVar +from typing import Optional +from dotenv import load_dotenv + +# Load env vars from .env +load_dotenv() + +logger = logging.getLogger(__name__) + +# Context variable to store the auth token per request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + """ + Get the Brave API token from context or fallback to env. + """ + try: + token = auth_token_context.get() + if not token: + # Fallback to environment variable + token = os.getenv("BRAVE_SEARCH_API_KEY") + logger.debug(f"Using token from environment: {token}") + if not token: + raise RuntimeError("No Brave auth token found in context or environment") + return token + except LookupError: + # Context variable not set at all + token = os.getenv("BRAVE_SEARCH_API_KEY") + if not token: + raise RuntimeError("No Brave auth token found in context or environment") + return token + +def get_brave_client() -> Optional[dict]: + """ + Return a Brave client config (e.g., base_url and headers) ready to use. + """ + try: + auth_token = get_auth_token() + client = auth_token + return client + except RuntimeError as e: + logger.warning(f"Failed to get Brave auth token: {e}") + return None + except Exception as e: + logger.error(f"Failed to initialize Brave client: {e}") + return None diff --git a/mcp_servers/brave_search/tools/search.py b/mcp_servers/brave_search/tools/search.py new file mode 100644 index 00000000..ef09a679 --- /dev/null +++ b/mcp_servers/brave_search/tools/search.py @@ -0,0 +1,238 @@ +import httpx +from .base import get_brave_client +import logging + +# Configure logging +logger = logging.getLogger(__name__) + + +async def brave_web_search( + query: str, + count: int = 5, + offset: int = None, + country: str = None, + search_lang: str = None, + safesearch: str = None +) -> dict: + """ + Perform a Brave search query. + + Args: + query (str): [Required] The user's search query. + count (int): Number of results (max 20, default 5). + offset (int): For pagination. + country (str): 2-letter country code, e.g., 'US'. + search_lang (str): Language code, e.g., 'en'. + safesearch (str): 'off', 'moderate', or 'strict'. + Returns: + dict: JSON response. + """ + url = "https://api.search.brave.com/res/v1/web/search" + headers = { + "Accept": "application/json", + "Accept-Encoding": "gzip", + "x-subscription-token": get_brave_client() + } + + params = {"q": query, + "count": count} + + param_list = [ + ("country", country), + ("search_lang", search_lang), + ("offset", offset), + ("safesearch", safesearch), + ] + for k, v in param_list: + if v is not None: + params[k] = v + + logger.info(f"Sending Brave search request: {query}") + try: + async with httpx.AsyncClient() as client: + response = await client.get(url, headers=headers, params=params) + response.raise_for_status() # Good practice to check for HTTP errors + logger.info("Received Brave search response") + return response.json() + + except Exception as e: + logger.error(f"Brave search failed: {e}") + return {"error": f"Could not complete Brave search for query: {query}"} + + +async def brave_image_search( + query: str, + count: int = 5, + offset: int = None, + search_lang: str = None, + country: str = None, + safesearch: str = None +) -> dict: + """ + Perform a Brave image search. + + Args: + query (str): [Required] Search query. + count (int): Number of image results (max 200, default 5). + offset (int): For pagination. + search_lang (str): Language code, e.g., 'en'. + country (str): 2-letter country code, e.g., 'US'. + safesearch (str): 'off' or 'strict' (default). + Returns: + dict: JSON response. + """ + token = get_brave_client() + if not token: + logger.error("Could not get Brave subscription token") + return {"error": "Missing Brave subscription token"} + + url = "https://api.search.brave.com/res/v1/images/search" + headers = { + "Accept": "application/json", + "Accept-Encoding": "gzip", + "x-subscription-token": token + } + + # Always include query + params = {"q": query, + 'count': count} + + # Optional query params + param_list = [ + ("search_lang", search_lang), + ("country", country), + ("safesearch", safesearch), + ("offset", offset) + ] + for k, v in param_list: + if v is not None: + params[k] = v + + logger.info(f"Sending Brave image search request: {query}") + try: + async with httpx.AsyncClient() as client: + response = await client.get(url, headers=headers, params=params) + logger.info("Received Brave image search response") + return response.json() + except Exception as e: + logger.error(f"Brave image search failed: {e}") + return {"error": f"Could not complete Brave image search for query: {query}"} + + +async def brave_news_search( + query: str, + count: int = 5, + offset: int = None, + country: str = None, + search_lang: str = None, + safesearch: str = None, + freshness: str = None +) -> dict: + """ + Perform a Brave news search. + Args: + query (str): [Required] Search query. + count (int): Number of news results (max 50, default 5). + offset (int): For pagination. + country (str): 2-letter country code, e.g., 'US'. + search_lang (str): Language code, e.g., 'en'. + safesearch (str): 'off', 'moderate', or 'strict'. + freshness (str): Filter by recency: 'pd' (24h), 'pw' (7d), 'pm' (31d), 'py' (year). + Returns: + dict: JSON response. + """ + token = get_brave_client() + if not token: + logger.error("Could not get Brave subscription token") + return {"error": "Missing Brave subscription token"} + + url = "https://api.search.brave.com/res/v1/news/search" + headers = { + "Accept": "application/json", + "Accept-Encoding": "gzip", + "x-subscription-token": token + } + + params = {"q": query, "count": count} + + param_list = [ + ("search_lang", search_lang), + ("country", country), + ("safesearch", safesearch), + ("offset", offset), + ("freshness", freshness), + ] + for k, v in param_list: + if v is not None: + params[k] = v + + logger.info(f"Sending Brave news search request: {query}") + try: + async with httpx.AsyncClient() as client: + response = await client.get(url, headers=headers, params=params) + logger.info("Received Brave news search response") + return response.json() + except Exception as e: + logger.error(f"Brave news search failed: {e}") + return {"error": f"Could not complete Brave news search for query: {query}"} + + +async def brave_video_search( + query: str, + count: int = 5, + offset: int = None, + country: str = None, + search_lang: str = None, + safesearch: str = None, + freshness: str = None +) -> dict: + """ + Perform a Brave video search. + + Args: + query (str): [Required] Search query. + count (int): Number of video results (max 50, default 5). + offset (int): For pagination. + country (str): 2-letter country code, e.g., 'US'. + search_lang (str): Language code, e.g., 'en'. + safesearch (str): 'off', 'moderate', or 'strict'. + freshness (str): Filter by recency: 'pd' (24h), 'pw' (7d), 'pm' (31d), 'py' (year). + Returns: + dict: JSON response. + """ + + token = get_brave_client() + if not token: + logger.error("Could not get Brave subscription token") + return {"error": "Missing Brave subscription token"} + + url = "https://api.search.brave.com/res/v1/videos/search" + headers = { + "Accept": "application/json", + "Accept-Encoding": "gzip", + "x-subscription-token": token + } + + params = {"q": query, + "count": count} + + param_list = [ + ("search_lang", search_lang), + ("country", country), + ("offset", offset), + ("freshness", freshness), + ("safesearch", safesearch), + ] + for k, v in param_list: + if v is not None: + params[k] = v + + logger.info(f"Sending Brave video search request: {query}") + try: + async with httpx.AsyncClient() as client: + response = await client.get(url, headers=headers, params=params) + logger.info("Received Brave video search response") + return response.json() + except Exception as e: + logger.error(f"Brave video search failed: {e}") + return {"error": f"Could not complete Brave video search for query: {query}"} diff --git a/mcp_servers/cal_com/.env.example b/mcp_servers/cal_com/.env.example new file mode 100644 index 00000000..d2609396 --- /dev/null +++ b/mcp_servers/cal_com/.env.example @@ -0,0 +1,2 @@ +CAL_COM_API_KEY='' +CAL_COM_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/cal_com/Dockerfile b/mcp_servers/cal_com/Dockerfile new file mode 100644 index 00000000..7c5b689e --- /dev/null +++ b/mcp_servers/cal_com/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.12-slim + +WORKDIR /app + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +RUN pip install --no-cache-dir --upgrade pip + +COPY mcp_servers/cal_com/requirements.txt . + +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/cal_com/server.py . +COPY mcp_servers/cal_com/tools/ ./tools/ + +EXPOSE 5000 + +CMD ["python", "-u", "server.py"] diff --git a/mcp_servers/cal_com/README.md b/mcp_servers/cal_com/README.md new file mode 100644 index 00000000..1f8c40d8 --- /dev/null +++ b/mcp_servers/cal_com/README.md @@ -0,0 +1,78 @@ +# Cal.com MCP Server + +A Model Context Protocol (MCP) server for Cal.com integration. Manage bookings, availability, and scheduling using Cal.com's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Cal.com with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("CAL_COM", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/calcom-mcp-server:latest + + +# Run Cal.com MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/calcom-mcp-server:latest + + +# Run Cal.com MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_cal_com_api_key_here"}' \ + ghcr.io/klavis-ai/calcom-mcp-server:latest +``` + +**OAuth Setup:** Cal.com requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Booking Management**: Create, update, and cancel meeting bookings +- **Availability Control**: Manage availability schedules and time slots +- **Event Types**: Configure meeting types and settings +- **User Management**: Handle user profiles and team configurations +- **Integration Settings**: Manage calendar and app integrations + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/cal_com/requirements.txt b/mcp_servers/cal_com/requirements.txt new file mode 100644 index 00000000..fef25453 --- /dev/null +++ b/mcp_servers/cal_com/requirements.txt @@ -0,0 +1 @@ +mcp==1.11.0 \ No newline at end of file diff --git a/mcp_servers/cal_com/server.py b/mcp_servers/cal_com/server.py new file mode 100644 index 00000000..07b16e8c --- /dev/null +++ b/mcp_servers/cal_com/server.py @@ -0,0 +1,529 @@ +import contextlib +import logging +import os +import json +import base64 +from collections.abc import AsyncIterator +from typing import Any, Dict, List + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + # base.py + auth_token_context, + + # schedule.py + cal_get_all_schedules, + cal_create_a_schedule, + cal_update_a_schedule, + cal_get_default_schedule, + cal_get_schedule, + cal_delete_a_schedule +) + + + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +CAL_COM_MCP_SERVER_PORT = int(os.getenv("CAL_COM_MCP_SERVER_PORT", "5000")) + +def extract_api_key(request_or_scope) -> str: + """Extract API key from headers or environment.""" + api_key = os.getenv("API_KEY") + + if not api_key: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + else: + auth_data = None + + if auth_data: + try: + # Parse the JSON auth data to extract token + auth_json = json.loads(auth_data) + api_key = auth_json.get('token') or auth_json.get('api_key') or '' + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + api_key = "" + + return api_key or "" + +@click.command() +@click.option("--port", default=CAL_COM_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) + +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("cal-com-mcp-server") +#------------------------------------------------------------------- + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # Get all schedules + types.Tool( + name="cal_get_all_schedules", + description="Retrieve all schedules from Cal.com API.", + inputSchema={ + "type": "object", + "properties": {}, # No parameters required + "required": [] + }, + annotations=types.ToolAnnotations( + **{"category": "CAL_SCHEDULE", "readOnlyHint": True} + ), + ), + + # Create a schedule + types.Tool( + name="cal_create_a_schedule", + description="Create a new schedule in Cal.com.", + inputSchema={ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name of the new schedule" + }, + "timeZone": { + "type": "string", + "description": "Time zone ID (e.g., 'America/New_York')" + }, + "isDefault": { + "type": "boolean", + "description": "Whether this should be the default schedule" + }, + "availability": { + "type": "array", + "description": "List of availability blocks", + "items": { + "type": "object", + "properties": { + "days": { + "type": "array", + "items": {"type": "string"}, + "description": "Capitalized day names (e.g., ['Monday','Tuesday'])" + }, + "startTime": { + "type": "string", + "description": "Start time in HH:mm format" + }, + "endTime": { + "type": "string", + "description": "End time in HH:mm format" + } + } + } + }, + "overrides": { + "type": "array", + "description": "Date-specific overrides", + "items": { + "type": "object", + "properties": { + "date": { + "type": "string", + "description": "Date in YYYY-MM-DD format" + }, + "startTime": { + "type": "string", + "description": "Start time in HH:mm format" + }, + "endTime": { + "type": "string", + "description": "End time in HH:mm format" + } + } + } + } + }, + "required": ["name", "timeZone", "isDefault"] + }, + annotations=types.ToolAnnotations( + **{"category": "CAL_SCHEDULE"} + ), + ), + + types.Tool( + name="cal_update_a_schedule", + description="Update an existing schedule in Cal.com.", + inputSchema={ + "type": "object", + "properties": { + "schedule_id": { + "type": "integer", + "description": "ID of the schedule to update" + }, + "name": { + "type": "string", + "description": "Updated schedule name" + }, + "timeZone": { + "type": "string", + "description": "Updated time zone ID (e.g., 'America/New_York')" + }, + "isDefault": { + "type": "boolean", + "description": "Whether to make this the default schedule" + }, + "availability": { + "type": "array", + "description": "Updated availability blocks", + "items": { + "type": "object", + "properties": { + "days": { + "type": "array", + "items": {"type": "string"}, + "description": "Capitalized day names (e.g., ['Monday','Tuesday'])" + }, + "startTime": { + "type": "string", + "description": "Start time in HH:mm format (e.g., '09:00')" + }, + "endTime": { + "type": "string", + "description": "End time in HH:mm format (e.g., '17:00')" + } + } + } + }, + "overrides": { + "type": "array", + "description": "Updated date overrides", + "items": { + "type": "object", + "properties": { + "date": { + "type": "string", + "description": "Date in YYYY-MM-DD format (e.g., '2023-12-31')" + }, + "startTime": { + "type": "string", + "description": "Start time in HH:mm format (e.g., '10:00')" + }, + "endTime": { + "type": "string", + "description": "End time in HH:mm format (e.g., '15:00')" + } + } + } + } + }, + "required": ["schedule_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "CAL_SCHEDULE"} + ), + ), + + # Get default schedule + types.Tool( + name="cal_get_default_schedule", + description="Get the default schedule from Cal.com.", + inputSchema={ + "type": "object", + "properties": {}, # No parameters + "required": [] + }, + annotations=types.ToolAnnotations( + **{"category": "CAL_SCHEDULE", "readOnlyHint": True} + ), + ), + + # Get specific schedule + types.Tool( + name="cal_get_schedule", + description="Get a specific schedule by its ID.", + inputSchema={ + "type": "object", + "properties": { + "schedule_id": { + "type": "integer", + "description": "ID of the schedule to retrieve" + } + }, + "required": ["schedule_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "CAL_SCHEDULE", "readOnlyHint": True} + ), + ), + + # Delete a schedule + types.Tool( + name="cal_delete_a_schedule", + description="Delete a schedule by its ID.", + inputSchema={ + "type": "object", + "properties": { + "schedule_id": { + "type": "integer", + "description": "ID of the schedule to delete" + } + }, + "required": ["schedule_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "CAL_SCHEDULE"} + ), + ) + ] + + @app.call_tool() + async def call_tool( + name: str, + arguments: dict + ) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + #Schedule.py------------------------------------------------------------------ + if name == "cal_get_all_schedules": + try: + result = await cal_get_all_schedules() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error getting all schedules: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "cal_create_a_schedule": + try: + result = await cal_create_a_schedule( + name=arguments["name"], + timeZone=arguments["timeZone"], + isDefault=arguments["isDefault"], + availability=arguments.get("availability"), + overrides=arguments.get("overrides") + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error creating schedule: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "cal_update_a_schedule": + try: + result = await cal_update_a_schedule( + schedule_id=arguments["schedule_id"], + name=arguments.get("name"), + timeZone=arguments.get("timeZone"), + isDefault=arguments.get("isDefault"), + availability=arguments.get("availability"), + overrides=arguments.get("overrides") + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error updating schedule: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "cal_get_default_schedule": + try: + result = await cal_get_default_schedule() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error getting default schedule: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "cal_get_schedule": + try: + result = await cal_get_schedule( + schedule_id=arguments["schedule_id"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error getting schedule: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "cal_delete_a_schedule": + try: + result = await cal_delete_a_schedule( + schedule_id=arguments["schedule_id"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error deleting schedule: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + #------------------------------------------------------------------------- + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract API key from headers + api_key = extract_api_key(request) + + # Set the API key in context for this request + token = auth_token_context.set(api_key) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract API key from headers + api_key = extract_api_key(scope) + + # Set the API key in context for this request + token = auth_token_context.set(api_key) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + + +if __name__ == "__main__": + main() diff --git a/mcp_servers/cal_com/tools/__init__.py b/mcp_servers/cal_com/tools/__init__.py new file mode 100644 index 00000000..e87e2c0c --- /dev/null +++ b/mcp_servers/cal_com/tools/__init__.py @@ -0,0 +1,25 @@ +from .base import ( + auth_token_context +) + +from .schedule import ( + cal_get_all_schedules, + cal_create_a_schedule, + cal_update_a_schedule, + cal_get_default_schedule, + cal_get_schedule, + cal_delete_a_schedule +) + +__all__ = [ + # base.py + "auth_token_context", + + # schedule.py + "cal_get_all_schedules", + "cal_create_a_schedule", + "cal_update_a_schedule", + "cal_get_default_schedule", + "cal_get_schedule", + "cal_delete_a_schedule", +] \ No newline at end of file diff --git a/mcp_servers/cal_com/tools/base.py b/mcp_servers/cal_com/tools/base.py new file mode 100644 index 00000000..c90c2faf --- /dev/null +++ b/mcp_servers/cal_com/tools/base.py @@ -0,0 +1,39 @@ +import logging +import os +from contextvars import ContextVar +from typing import Optional +from dotenv import load_dotenv + +# Load environment variables from .env file +load_dotenv() + +logger = logging.getLogger(__name__) + +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + try: + token = auth_token_context.get() + if not token: + # Fallback to environment variable if no token in context + token = os.getenv("CAL_COM_API_KEY") + if not token: + raise RuntimeError("No authentication token available") + return token + except LookupError: + token = os.getenv("CAL_COM_API_KEY") + if not token: + raise RuntimeError("Authentication token not found in context or environment") + return token + +def get_calcom_client() -> Optional[dict]: + """ + Return a simple client dict with base_url and headers. + """ + try: + auth_token = get_auth_token() + client = auth_token + return client + except RuntimeError as e: + logger.warning(f"Failed to get auth token: {e}") + return None \ No newline at end of file diff --git a/mcp_servers/cal_com/tools/schedule.py b/mcp_servers/cal_com/tools/schedule.py new file mode 100644 index 00000000..964eafff --- /dev/null +++ b/mcp_servers/cal_com/tools/schedule.py @@ -0,0 +1,345 @@ +import httpx +import json +import logging +from .base import get_calcom_client + + +# Configure logging +logger = logging.getLogger(__name__) + + +def header(): + client = get_calcom_client() + if not client: + logging.error("Could not get Cal.com client") + return {"error": "Could not get Cal.com client"} + value = { + "Authorization": client, + "cal-api-version": "2024-06-11" + } + + return value + + + +async def cal_get_all_schedules() -> dict: + """ + Retrieve all schedules from Cal.com API. + + Returns: + dict: On success → parsed JSON response from Cal.com API. + On failure → dict with "error" key and message. + """ + headers = header() + if not headers: + logging.error("Could not get Cal.com client") + return {"error": "Could not get Cal.com client"} + + url = "https://api.cal.com/v2/schedules/" + logging.info(f"Requesting Cal.com schedules from {url}") + + try: + # Use an async context manager to handle the client's lifecycle + async with httpx.AsyncClient() as client: + response = await client.get(url, headers=headers) + # This checks for HTTP error statuses (e.g., 404, 500) + response.raise_for_status() + logging.info("Successfully retrieved Cal.com schedules") + return response.json() + except httpx.RequestError as e: + logging.error(f"Could not get Cal.com schedules from {url}: {e}") + return {"error": f"Could not get Cal.com schedules from {url}"} + except Exception as e: + logging.error(f"Unexpected error when fetching Cal.com schedules: {e}") + return {"error": "Unexpected error occurred"} + +async def cal_create_a_schedule( + name: str, + timeZone: str, + isDefault: bool, + availability: list = None, + overrides: list = None +) -> dict: + """ + Create a new schedule in Cal.com. + + Args: + name (str): Schedule name. + timeZone (str): Time zone string (e.g., "America/New_York"). + isDefault (bool): Whether this should be the default schedule. + availability (list, optional): List of availability blocks. Each block is a dict: + { + "days": ["Monday", "Tuesday", ...], # Days must start with a capital letter + "startTime": "09:00", # Time format: "HH:mm" + "endTime": "17:00" + } + overrides (list, optional): List of overrides for specific dates. Each override is a dict: + { + "date": "YYYY-MM-DD", + "startTime": "10:00", # Time format: "HH:mm" + "endTime": "12:00" + } + + Returns: + dict: If successful, parsed JSON response from Cal.com. + If failed, dict with "error" key and message. + """ + + url = "https://api.cal.com/v2/schedules/" + headers = header() + if not headers: + logging.error("Could not get Cal.com client") + return {"error": "Could not get Cal.com client"} + + payload = { + "name": name, + "timeZone": timeZone, + "isDefault": isDefault, + } + + if availability: + payload["availability"] = availability + if overrides: + payload["overrides"] = overrides + + logging.info(f"Creating Cal.com schedule: {name}") + + try: + # Use an async context manager for the client + async with httpx.AsyncClient() as client: + # The 'json' parameter works the same as in requests + response = await client.post(url, json=payload, headers=headers) + # Check for HTTP errors (e.g., 4xx or 5xx responses) + response.raise_for_status() + logging.info("Successfully created Cal.com schedule") + return response.json() + + except httpx.RequestError as e: + logging.error(f"Could not create Cal.com schedule: {e}") + return {"error": f"Could not create Cal.com schedule: {e}"} + except Exception as e: + logging.error(f"Unexpected error when creating Cal.com schedule: {e}") + return {"error": "Unexpected error occurred"} + +async def cal_update_a_schedule( + schedule_id: int, + name: str = None, + timeZone: str = None, + isDefault: bool = None, + availability: list = None, + overrides: list = None +) -> dict: + """ + Update an existing schedule in Cal.com. + + Args: + schedule_id (int): ID of the schedule to update (required). + name (str, optional): New schedule name. + timeZone (str, optional): New time zone string (e.g., "America/New_York"). + isDefault (bool, optional): Set as default schedule. + availability (list, optional): List of availability blocks. Each block is a dict: + { + "days": ["Monday", "Tuesday", ...], # Days must start with a capital letter + "startTime": "09:00", # Time format: "HH:mm" + "endTime": "17:00" + } + overrides (list, optional): List of overrides for specific dates. Each override is a dict: + { + "date": "YYYY-MM-DD", + "startTime": "10:00", # Time format: "HH:mm" + "endTime": "12:00" + } + + Returns: + dict: If successful, parsed JSON response from Cal.com. + If failed, dict with "error" key and message. + """ + + url = "https://api.cal.com/v2/schedules/" + headers = header() + if not headers: + logging.error("Could not get Cal.com client") + return {"error": "Could not get Cal.com client"} + + if not schedule_id: + logging.error("Missing required: schedule_id") + return {"error": "Missing required: schedule_id"} + + url_new = url + str(schedule_id) + if not headers: + logging.error("Could not get Cal.com client") + return {"error": "Could not get Cal.com client"} + + payload = {} + + if name is not None: + payload["name"] = name + if timeZone is not None: + payload["timeZone"] = timeZone + if isDefault is not None: + payload["isDefault"] = isDefault + if availability: + payload["availability"] = availability + if overrides: + payload["overrides"] = overrides + + logging.info(f"Updating Cal.com schedule ID: {schedule_id}") + + try: + # Use an async context manager for the client + async with httpx.AsyncClient() as client: + # Make an async PATCH request + response = await client.patch(url_new, json=payload, headers=headers) + + # Check for HTTP errors (e.g., 4xx or 5xx responses) + response.raise_for_status() + + logging.info("Successfully updated Cal.com schedule") + return response.json() + + except httpx.RequestError as e: + # Catch httpx-specific request errors + logging.error(f"Could not update Cal.com schedule: {e}") + return {"error": f"Could not update Cal.com schedule: {e}"} + + except Exception as e: + # A general catch-all for any other errors + logging.error(f"Unexpected error when updating Cal.com schedule: {e}") + return {"error": "Unexpected error occurred"} + +async def cal_get_default_schedule() -> dict: + """ + Get the default schedule from Cal.com. + + Returns: + dict: If successful, parsed JSON response from Cal.com. + If failed, dict with "error" key and message. + """ + + url = "https://api.cal.com/v2/schedules/" + url_new = url + "default" + + headers = header() + if not headers: + logging.error("Could not get Cal.com client") + return {"error": "Could not get Cal.com client"} + + logging.info("Fetching default schedule from Cal.com") + + try: + # Use an async context manager for the client + async with httpx.AsyncClient() as client: + # Make an async GET request + response = await client.get(url_new, headers=headers) + + # Check for HTTP errors (e.g., 4xx or 5xx responses) + response.raise_for_status() + + logging.info("Successfully fetched default schedule") + return response.json() + + except httpx.RequestError as e: + # Catch httpx-specific request errors + logging.error(f"Could not get default schedule: {e}") + return {"error": f"Could not get default schedule: {e}"} + + except Exception as e: + # A general catch-all for any other errors + logging.error(f"Unexpected error when getting default schedule: {e}") + return {"error": "Unexpected error occurred"} + + +async def cal_get_schedule(schedule_id: int) -> dict: + """ + Get a specific schedule from Cal.com by its ID. + + Args: + schedule_id (int): ID of the schedule to fetch. + + Returns: + dict: If successful, parsed JSON response from Cal.com. + If failed, dict with "error" key and message. + """ + + url = "https://api.cal.com/v2/schedules/" + headers = header() + if not headers: + logging.error("Could not get Cal.com client") + return {"error": "Could not get Cal.com client"} + + if not schedule_id: + logging.error("Missing required: schedule_id") + return {"error": "Missing required: schedule_id"} + + url_new = url + str(schedule_id) + logging.info(f"Fetching Cal.com schedule ID: {schedule_id}") + + try: + # Use an async context manager for the client + async with httpx.AsyncClient() as client: + # Make an async GET request + response = await client.get(url_new, headers=headers) + + # Check for HTTP errors (e.g., 4xx or 5xx responses) + response.raise_for_status() + + logging.info("Successfully fetched schedule") + return response.json() + + except httpx.RequestError as e: + # Catch httpx-specific request errors + logging.error(f"Could not get schedule: {e}") + return {"error": f"Could not get schedule: {e}"} + + except Exception as e: + # A general catch-all for any other errors + logging.error(f"Unexpected error when getting schedule: {e}") + return {"error": "Unexpected error occurred"} + +async def cal_delete_a_schedule(schedule_id: int) -> dict: + """ + Delete a schedule in Cal.com by its ID. + + Args: + schedule_id (int): ID of the schedule to delete. + + Returns: + dict: If successful, parsed JSON response from Cal.com. + If failed, dict with "error" key and message. + """ + + url = "https://api.cal.com/v2/schedules/" + headers = header() + if not headers: + logging.error("Could not get Cal.com client") + return {"error": "Could not get Cal.com client"} + + if not schedule_id: + logging.error("Missing required: schedule_id") + return {"error": "Missing required: schedule_id"} + + url_new = url + str(schedule_id) + + logging.info(f"Deleting Cal.com schedule ID: {schedule_id}") + + try: + # Use an async context manager for the client + async with httpx.AsyncClient() as client: + # Make an async DELETE request + response = await client.delete(url_new, headers=headers) + + # Check for HTTP errors (e.g., 4xx or 5xx responses) + response.raise_for_status() + + logging.info("Successfully deleted schedule") + return response.json() + + except httpx.RequestError as e: + # Catch httpx-specific request errors + logging.error(f"Could not delete schedule: {e}") + return {"error": f"Could not delete schedule: {e}"} + + except Exception as e: + # A general catch-all for any other errors + logging.error(f"Unexpected error when deleting schedule: {e}") + return {"error": "Unexpected error occurred"} \ No newline at end of file diff --git a/mcp_servers/calendly/.env.example b/mcp_servers/calendly/.env.example new file mode 100644 index 00000000..6a2394a8 --- /dev/null +++ b/mcp_servers/calendly/.env.example @@ -0,0 +1,2 @@ +CALENDLY_ACCESS_TOKEN=YOUR_CALENDLY_ACCESS_TOKEN_HERE +CALENDLY_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/calendly/Dockerfile b/mcp_servers/calendly/Dockerfile new file mode 100644 index 00000000..9a8b474d --- /dev/null +++ b/mcp_servers/calendly/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +COPY mcp_servers/calendly/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/calendly/server.py . +COPY mcp_servers/calendly/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/calendly/requirements.txt b/mcp_servers/calendly/requirements.txt new file mode 100644 index 00000000..e7aa1f25 --- /dev/null +++ b/mcp_servers/calendly/requirements.txt @@ -0,0 +1,10 @@ +mcp>=1.12.0 +fastapi +uvicorn[standard] +click>=8.0.0 +pydantic>=2.5.0 +aiohttp>=3.8.0 +httpx>=0.27.0 +python-dotenv>=1.0.0 +typing-extensions +starlette>=0.27.0 \ No newline at end of file diff --git a/mcp_servers/calendly/server.py b/mcp_servers/calendly/server.py new file mode 100644 index 00000000..64a9a6d9 --- /dev/null +++ b/mcp_servers/calendly/server.py @@ -0,0 +1,409 @@ +import os +import logging +import contextlib +import json +from collections.abc import AsyncIterator +from typing import Any, Dict, List, Optional +from contextvars import ContextVar + +import click +from dotenv import load_dotenv +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +from tools import ( + auth_token_context, + get_user_info, + list_events, + get_event_details, + list_event_types, + list_availability_schedules, + list_event_invitees, +) + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +CALENDLY_MCP_SERVER_PORT = int(os.getenv("CALENDLY_MCP_SERVER_PORT", "5000")) + +@click.command() +@click.option("--port", default=CALENDLY_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("calendly-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="calendly_get_user_info", + description="Get current user's Calendly profile information.", + inputSchema={ + "type": "object", + "properties": {} + } + ), + types.Tool( + name="calendly_list_events", + description="List scheduled events for the current user.", + inputSchema={ + "type": "object", + "properties": { + "status": { + "type": "string", + "description": "Filter events by status (active, canceled). Leave empty for all.", + "enum": ["active", "canceled"] + }, + "count": { + "type": "integer", + "description": "Number of events to return (max 100).", + "default": 20, + "maximum": 100 + } + } + } + ), + types.Tool( + name="calendly_get_event_details", + description="Get detailed information about a specific event.", + inputSchema={ + "type": "object", + "required": ["event_uuid"], + "properties": { + "event_uuid": { + "type": "string", + "description": "The UUID of the event to retrieve details for." + } + } + } + ), + types.Tool( + name="calendly_list_event_types", + description="List available event types for the current user.", + inputSchema={ + "type": "object", + "properties": { + "active": { + "type": "boolean", + "description": "Filter by active status. Leave empty for all.", + "default": True + }, + "count": { + "type": "integer", + "description": "Number of event types to return (max 100).", + "default": 20, + "maximum": 100 + } + } + } + ), + types.Tool( + name="calendly_list_availability_schedules", + description="List the availability schedules of the given user.", + inputSchema={ + "type": "object", + "properties": { + "user_uri": { + "type": "string", + "description": "The URI of the user to get availability schedules for. If not provided, uses current user." + }, + "count": { + "type": "integer", + "description": "Number of availability schedules to return (max 100).", + "default": 20, + "maximum": 100 + } + } + } + ), + types.Tool( + name="calendly_list_event_invitees", + description="List invitees for a specific scheduled event.", + inputSchema={ + "type": "object", + "required": ["event_uuid"], + "properties": { + "event_uuid": { + "type": "string", + "description": "The UUID of the event to list invitees for." + }, + "count": { + "type": "integer", + "description": "Number of invitees to return (max 100).", + "default": 20, + "maximum": 100 + }, + "email": { + "type": "string", + "description": "Filter invitees by email address." + }, + "status": { + "type": "string", + "description": "Filter invitees by status.", + "enum": ["active", "canceled"] + } + } + } + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + if name == "calendly_get_user_info": + try: + result = await get_user_info() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "calendly_list_events": + status = arguments.get("status") + count = arguments.get("count", 20) + try: + result = await list_events(status, count) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "calendly_get_event_details": + event_uuid = arguments.get("event_uuid") + if not event_uuid: + return [ + types.TextContent( + type="text", + text="Error: event_uuid parameter is required", + ) + ] + try: + result = await get_event_details(event_uuid) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "calendly_list_event_types": + active = arguments.get("active", True) + count = arguments.get("count", 20) + try: + result = await list_event_types(active, count) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "calendly_list_availability_schedules": + user_uri = arguments.get("user_uri") + count = arguments.get("count", 20) + try: + result = await list_availability_schedules(user_uri, count) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "calendly_list_event_invitees": + event_uuid = arguments.get("event_uuid") + count = arguments.get("count", 20) + email = arguments.get("email") + status = arguments.get("status") + if not event_uuid: + return [ + types.TextContent( + type="text", + text="Error: event_uuid parameter is required", + ) + ] + try: + result = await list_event_invitees(event_uuid, count, email, status) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + else: + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers (allow None - will be handled at tool level) + calendly_token = request.headers.get('x-auth-token') + + # Set the auth token in context for this request (can be None) + token = auth_token_context.set(calendly_token or "") + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers (allow None - will be handled at tool level) + headers = dict(scope.get("headers", [])) + calendly_token = headers.get(b'x-auth-token') + if calendly_token: + calendly_token = calendly_token.decode('utf-8') + + # Set the auth token in context for this request (can be None/empty) + token = auth_token_context.set(calendly_token or "") + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/calendly/tools/__init__.py b/mcp_servers/calendly/tools/__init__.py new file mode 100644 index 00000000..8e7b5ba9 --- /dev/null +++ b/mcp_servers/calendly/tools/__init__.py @@ -0,0 +1,18 @@ +from .auth import get_user_info +from .events import list_events, get_event_details, list_event_types, list_availability_schedules, list_event_invitees +from .base import auth_token_context + +__all__ = [ + # Auth + "get_user_info", + + # Events + "get_event_details", + "list_events", + "list_event_types", + "list_availability_schedules", + "list_event_invitees", + + # Base + "auth_token_context", +] \ No newline at end of file diff --git a/mcp_servers/calendly/tools/auth.py b/mcp_servers/calendly/tools/auth.py new file mode 100644 index 00000000..5673f089 --- /dev/null +++ b/mcp_servers/calendly/tools/auth.py @@ -0,0 +1,35 @@ +import logging +from typing import Any, Dict +from .base import make_calendly_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_user_info() -> Dict[str, Any]: + """Get current user's Calendly profile information.""" + logger.info("Executing tool: get_user_info") + try: + # Get current user information + endpoint = "/users/me" + + # Get user data + user_data = await make_calendly_request("GET", endpoint) + + # Extract essential user information from the response + if 'resource' in user_data: + resource = user_data['resource'] + user_info = { + "uri": resource.get("uri"), + "name": resource.get("name"), + "slug": resource.get("slug"), + "email": resource.get("email"), + "scheduling_url": resource.get("scheduling_url"), + "timezone": resource.get("timezone"), + } + else: + user_info = user_data + + return user_info + except Exception as e: + logger.exception(f"Error executing tool get_user_info: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/calendly/tools/base.py b/mcp_servers/calendly/tools/base.py new file mode 100644 index 00000000..d3cba5d8 --- /dev/null +++ b/mcp_servers/calendly/tools/base.py @@ -0,0 +1,112 @@ +import logging +from typing import Any, Dict, Optional +from contextvars import ContextVar +import httpx + +# Configure logging +logger = logging.getLogger(__name__) + +# Calendly API constants +CALENDLY_API_BASE = "https://api.calendly.com/" + +# Context variable to store the authentication token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("Authentication token not found in request context") + +class CalendlyClient: + """Client for Calendly API using Bearer Authentication.""" + + @staticmethod + async def make_request( + method: str, + endpoint: str, + json_data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None, + expect_empty_response: bool = False + ) -> Dict[str, Any]: + """Make an HTTP request to Calendly API.""" + access_token = get_auth_token() + + if not access_token: + raise RuntimeError("No access token provided. Please set the x-auth-token header.") + + headers = { + "Authorization": f"Bearer {access_token}", + "Content-Type": "application/json" + } + + url = f"{CALENDLY_API_BASE}{endpoint}" + + async with httpx.AsyncClient() as client: + try: + if method.upper() == "GET": + response = await client.get(url, headers=headers, params=params) + elif method.upper() == "POST": + response = await client.post(url, headers=headers, json=json_data) + elif method.upper() == "PUT": + response = await client.put(url, headers=headers, json=json_data) + elif method.upper() == "PATCH": + response = await client.patch(url, headers=headers, json=json_data) + elif method.upper() == "DELETE": + response = await client.delete(url, headers=headers) + else: + raise ValueError(f"Unsupported HTTP method: {method}") + + response.raise_for_status() + + # Handle empty responses + if expect_empty_response or response.status_code == 204 or not response.content: + return {"success": True} + + try: + json_response = response.json() + # Handle null/undefined responses + if json_response is None: + return {"data": None, "message": "API returned null response"} + return json_response + except ValueError as e: + # Handle cases where response content exists but isn't valid JSON + logger.error(f"Failed to parse JSON response: {e}") + logger.error(f"Response content: {response.content}") + return {"error": "Invalid JSON response", "content": response.text} + + except httpx.HTTPStatusError as e: + logger.error(f"Calendly API request failed: {e.response.status_code} {e.response.reason_phrase} for {method} {url}") + error_details = e.response.reason_phrase + try: + error_body = e.response.json() + error_details = f"{e.response.reason_phrase} - {error_body}" + except Exception: + pass + raise RuntimeError(f"Calendly API Error ({e.response.status_code}): {error_details}") from e + except Exception as e: + logger.error(f"An unexpected error occurred during Calendly API request: {e}") + raise RuntimeError(f"Unexpected error during API call to {method} {url}") from e + +async def make_calendly_request( + method: str, + endpoint: str, + json_data: Optional[Dict] = None, + params: Optional[Dict] = None, + expect_empty_response: bool = False +) -> Any: + """ + Makes an HTTP request to the Calendly API. + + Args: + method: HTTP method (GET, POST, etc.) + endpoint: API endpoint (should start with /) + json_data: JSON payload for POST/PUT requests + params: Query parameters + expect_empty_response: Whether to expect an empty response (for some operations) + + Returns: + Response data as dict, or None for empty responses + """ + return await CalendlyClient.make_request(method, endpoint, json_data, params, expect_empty_response) \ No newline at end of file diff --git a/mcp_servers/calendly/tools/events.py b/mcp_servers/calendly/tools/events.py new file mode 100644 index 00000000..2cfaaf75 --- /dev/null +++ b/mcp_servers/calendly/tools/events.py @@ -0,0 +1,151 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import make_calendly_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def list_events( + status: Optional[str] = None, + count: int = 20 +) -> Dict[str, Any]: + """List scheduled events for the current user.""" + logger.info(f"Executing tool: list_events with status: {status}, count: {count}") + try: + # First get current user to get their URI + user_data = await make_calendly_request("GET", "/users/me") + user_uri = user_data.get("resource", {}).get("uri") + + if not user_uri: + raise RuntimeError("Unable to get current user URI") + + # Build query parameters + params = { + "user": user_uri, + "count": min(count, 100) # Calendly API limit + } + + if status: + params["status"] = status + + # Get events + endpoint = "/scheduled_events" + events_data = await make_calendly_request("GET", endpoint, params=params) + + return events_data + except Exception as e: + logger.exception(f"Error executing tool list_events: {e}") + raise e + +async def get_event_details(event_uuid: str) -> Dict[str, Any]: + """Get detailed information about a specific event.""" + logger.info(f"Executing tool: get_event_details with event_uuid: {event_uuid}") + try: + # Get event details + endpoint = f"/scheduled_events/{event_uuid}" + event_data = await make_calendly_request("GET", endpoint) + + # Also get invitees for this event + try: + invitees_endpoint = f"/scheduled_events/{event_uuid}/invitees" + invitees_data = await make_calendly_request("GET", invitees_endpoint) + event_data["invitees"] = invitees_data + except Exception as e: + logger.warning(f"Could not get invitees for event {event_uuid}: {e}") + event_data["invitees"] = {"error": str(e)} + + return event_data + except Exception as e: + logger.exception(f"Error executing tool get_event_details: {e}") + raise e + +async def list_event_types( + active: bool = True, + count: int = 20 +) -> Dict[str, Any]: + """List available event types for the current user.""" + logger.info(f"Executing tool: list_event_types with active: {active}, count: {count}") + try: + # First get current user to get their URI + user_data = await make_calendly_request("GET", "/users/me") + user_uri = user_data.get("resource", {}).get("uri") + + if not user_uri: + raise RuntimeError("Unable to get current user URI") + + # Build query parameters + params = { + "user": user_uri, + "count": min(count, 100) # Calendly API limit + } + + if active is not None: + params["active"] = "true" if active else "false" + + # Get event types + endpoint = "/event_types" + event_types_data = await make_calendly_request("GET", endpoint, params=params) + + return event_types_data + except Exception as e: + logger.exception(f"Error executing tool list_event_types: {e}") + raise e + +async def list_availability_schedules( + user_uri: Optional[str] = None, + count: int = 20 +) -> Dict[str, Any]: + """List the availability schedules of the given user.""" + logger.info(f"Executing tool: list_availability_schedules with user_uri: {user_uri}, count: {count}") + try: + # If no user_uri provided, get current user's URI + if not user_uri: + user_data = await make_calendly_request("GET", "/users/me") + user_uri = user_data.get("resource", {}).get("uri") + + if not user_uri: + raise RuntimeError("Unable to get current user URI") + + # Build query parameters + params = { + "user": user_uri, + "count": min(count, 100) # Calendly API limit + } + + # Get user availability schedules + endpoint = "/user_availability_schedules" + schedules_data = await make_calendly_request("GET", endpoint, params=params) + + return schedules_data + except Exception as e: + logger.exception(f"Error executing tool list_availability_schedules: {e}") + raise e + +async def list_event_invitees( + event_uuid: str, + count: int = 20, + email: Optional[str] = None, + status: Optional[str] = None +) -> Dict[str, Any]: + """List invitees for a specific scheduled event.""" + logger.info(f"Executing tool: list_event_invitees with event_uuid: {event_uuid}, count: {count}, email: {email}, status: {status}") + try: + # Build query parameters + params = { + "count": min(count, 100) # Calendly API limit + } + + if email: + params["email"] = email + + if status: + params["status"] = status + + # Get event invitees + endpoint = f"/scheduled_events/{event_uuid}/invitees" + invitees_data = await make_calendly_request("GET", endpoint, params=params) + + return invitees_data + except Exception as e: + logger.exception(f"Error executing tool list_event_invitees: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/clickup/.env.example b/mcp_servers/clickup/.env.example new file mode 100644 index 00000000..266e8237 --- /dev/null +++ b/mcp_servers/clickup/.env.example @@ -0,0 +1 @@ +CLICKUP_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/clickup/Dockerfile b/mcp_servers/clickup/Dockerfile new file mode 100644 index 00000000..87d17401 --- /dev/null +++ b/mcp_servers/clickup/Dockerfile @@ -0,0 +1,23 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/clickup/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/clickup/server.py . +COPY mcp_servers/clickup/tools/ ./tools/ + +COPY mcp_servers/clickup/.env.example .env + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/clickup/README.md b/mcp_servers/clickup/README.md new file mode 100644 index 00000000..71d33243 --- /dev/null +++ b/mcp_servers/clickup/README.md @@ -0,0 +1,78 @@ +# ClickUp MCP Server + +A Model Context Protocol (MCP) server for ClickUp integration. Manage tasks, projects, and team workflows using ClickUp's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to ClickUp with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("CLICKUP", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/clickup-mcp-server:latest + + +# Run ClickUp MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/clickup-mcp-server:latest + + +# Run ClickUp MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_clickup_api_token_here"}' \ + ghcr.io/klavis-ai/clickup-mcp-server:latest +``` + +**OAuth Setup:** ClickUp requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Task Management**: Create, read, update, and complete tasks +- **Project Operations**: Manage spaces, folders, and lists +- **Time Tracking**: Handle time tracking and productivity metrics +- **Team Collaboration**: Manage team members and permissions +- **Custom Fields**: Work with custom task fields and properties + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/clickup/requirements.txt b/mcp_servers/clickup/requirements.txt new file mode 100644 index 00000000..7eda28a2 --- /dev/null +++ b/mcp_servers/clickup/requirements.txt @@ -0,0 +1,10 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +requests +httpx +click +starlette \ No newline at end of file diff --git a/mcp_servers/clickup/server.py b/mcp_servers/clickup/server.py new file mode 100644 index 00000000..70df4596 --- /dev/null +++ b/mcp_servers/clickup/server.py @@ -0,0 +1,918 @@ +import contextlib +import logging +import os +import json +import base64 +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + auth_token_context, + get_teams, get_workspaces, + get_spaces, create_space, update_space, + get_folders, create_folder, update_folder, + get_lists, create_list, update_list, + get_tasks, get_task_by_id, create_task, update_task, search_tasks, + get_comments, create_comment, update_comment, + get_user, get_team_members +) + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +CLICKUP_MCP_SERVER_PORT = int(os.getenv("CLICKUP_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + else: + auth_data = None + + if auth_data: + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + + return "" + +@click.command() +@click.option("--port", default=CLICKUP_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("clickup-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # Team/Workspace tools + types.Tool( + name="clickup_get_teams", + description="Get all teams/workspaces the user has access to.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_TEAM", "readOnlyHint": True} + ), + ), + types.Tool( + name="clickup_get_workspaces", + description="Get all workspaces (alias for get_teams).", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_WORKSPACE", "readOnlyHint": True} + ), + ), + + # Space tools + types.Tool( + name="clickup_get_spaces", + description="Get all spaces in a team.", + inputSchema={ + "type": "object", + "required": ["team_id"], + "properties": { + "team_id": { + "type": "string", + "description": "The ID of the team to get spaces from.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_SPACE", "readOnlyHint": True} + ), + ), + types.Tool( + name="clickup_create_space", + description="Create a new space in a team.", + inputSchema={ + "type": "object", + "required": ["team_id", "name"], + "properties": { + "team_id": { + "type": "string", + "description": "The ID of the team to create the space in.", + }, + "name": { + "type": "string", + "description": "The name of the space.", + }, + "color": { + "type": "string", + "description": "The color for the space (optional).", + }, + "private": { + "type": "boolean", + "description": "Whether the space should be private (default: false).", + "default": False, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_SPACE"} + ), + ), + types.Tool( + name="clickup_update_space", + description="Update an existing space.", + inputSchema={ + "type": "object", + "required": ["space_id"], + "properties": { + "space_id": { + "type": "string", + "description": "The ID of the space to update.", + }, + "name": { + "type": "string", + "description": "The new name of the space.", + }, + "color": { + "type": "string", + "description": "The new color for the space.", + }, + "private": { + "type": "boolean", + "description": "Whether the space should be private.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_SPACE"} + ), + ), + + # Folder tools + types.Tool( + name="clickup_get_folders", + description="Get all folders in a space.", + inputSchema={ + "type": "object", + "required": ["space_id"], + "properties": { + "space_id": { + "type": "string", + "description": "The ID of the space to get folders from.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_FOLDER", "readOnlyHint": True} + ), + ), + types.Tool( + name="clickup_create_folder", + description="Create a new folder in a space.", + inputSchema={ + "type": "object", + "required": ["space_id", "name"], + "properties": { + "space_id": { + "type": "string", + "description": "The ID of the space to create the folder in.", + }, + "name": { + "type": "string", + "description": "The name of the folder.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_FOLDER"} + ), + ), + types.Tool( + name="clickup_update_folder", + description="Update an existing folder.", + inputSchema={ + "type": "object", + "required": ["folder_id", "name"], + "properties": { + "folder_id": { + "type": "string", + "description": "The ID of the folder to update.", + }, + "name": { + "type": "string", + "description": "The new name of the folder.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_FOLDER"} + ), + ), + + # List tools + types.Tool( + name="clickup_get_lists", + description="Get all lists in a folder or space. Either folder_id or space_id must be provided.", + inputSchema={ + "type": "object", + "properties": { + "folder_id": { + "type": "string", + "description": "The ID of the folder to get lists from.", + }, + "space_id": { + "type": "string", + "description": "The ID of the space to get lists from.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_LIST", "readOnlyHint": True} + ), + ), + types.Tool( + name="clickup_create_list", + description="Create a new list in a folder or space. Either folder_id or space_id must be provided along with name.", + inputSchema={ + "type": "object", + "required": ["name"], + "properties": { + "folder_id": { + "type": "string", + "description": "The ID of the folder to create the list in.", + }, + "space_id": { + "type": "string", + "description": "The ID of the space to create the list in.", + }, + "name": { + "type": "string", + "description": "The name of the list.", + }, + "content": { + "type": "string", + "description": "The description/content of the list.", + }, + "due_date": { + "type": "string", + "description": "Due date for the list (ISO date string).", + }, + "priority": { + "type": "integer", + "description": "Priority level (1=urgent, 2=high, 3=normal, 4=low).", + }, + "assignee": { + "type": "string", + "description": "User ID to assign the list to.", + }, + "status": { + "type": "string", + "description": "Status of the list.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_LIST"} + ), + ), + types.Tool( + name="clickup_update_list", + description="Update an existing list.", + inputSchema={ + "type": "object", + "required": ["list_id"], + "properties": { + "list_id": { + "type": "string", + "description": "The ID of the list to update.", + }, + "name": { + "type": "string", + "description": "The new name of the list.", + }, + "content": { + "type": "string", + "description": "The new description/content of the list.", + }, + "due_date": { + "type": "string", + "description": "New due date for the list (ISO date string).", + }, + "priority": { + "type": "integer", + "description": "New priority level (1=urgent, 2=high, 3=normal, 4=low).", + }, + "assignee": { + "type": "string", + "description": "User ID to assign the list to.", + }, + "unset_status": { + "type": "boolean", + "description": "Whether to unset the status.", + "default": False, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_LIST"} + ), + ), + + # Task tools - continuing from line 316 + types.Tool( + name="clickup_get_tasks", + description="Get tasks from a list with optional filtering.", + inputSchema={ + "type": "object", + "required": ["list_id"], + "properties": { + "list_id": { + "type": "string", + "description": "The ID of the list to get tasks from.", + }, + "archived": { + "type": "boolean", + "description": "Include archived tasks (default: false).", + "default": False, + }, + "include_closed": { + "type": "boolean", + "description": "Include closed tasks (default: false).", + "default": False, + }, + "page": { + "type": "integer", + "description": "Page number for pagination (default: 0).", + "default": 0, + }, + "subtasks": { + "type": "boolean", + "description": "Include subtasks (default: false).", + "default": False, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_TASK", "readOnlyHint": True} + ), + ), + types.Tool( + name="clickup_get_task_by_id", + description="Get a specific task by ID.", + inputSchema={ + "type": "object", + "required": ["task_id"], + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to retrieve.", + }, + "include_subtasks": { + "type": "boolean", + "description": "Include subtasks (default: false).", + "default": False, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_TASK", "readOnlyHint": True} + ), + ), + types.Tool( + name="clickup_create_task", + description="Create a new task in ClickUp.", + inputSchema={ + "type": "object", + "required": ["list_id", "name"], + "properties": { + "list_id": { + "type": "string", + "description": "The ID of the list to create the task in.", + }, + "name": { + "type": "string", + "description": "The name of the task.", + }, + "description": { + "type": "string", + "description": "The description of the task.", + }, + "assignees": { + "type": "array", + "items": {"type": "string"}, + "description": "Array of user IDs to assign the task to.", + }, + "status": { + "type": "string", + "description": "The status of the task.", + }, + "priority": { + "type": "integer", + "description": "Priority level (1=urgent, 2=high, 3=normal, 4=low).", + }, + "due_date": { + "type": "integer", + "description": "Due date as Unix timestamp in milliseconds.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_TASK"} + ), + ), + types.Tool( + name="clickup_update_task", + description="Update an existing task in ClickUp.", + inputSchema={ + "type": "object", + "required": ["task_id"], + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to update.", + }, + "name": { + "type": "string", + "description": "The new name of the task.", + }, + "description": { + "type": "string", + "description": "The new description of the task.", + }, + "status": { + "type": "string", + "description": "The new status of the task.", + }, + "priority": { + "type": "integer", + "description": "New priority level (1=urgent, 2=high, 3=normal, 4=low).", + }, + "due_date": { + "type": "integer", + "description": "New due date as Unix timestamp in milliseconds.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_TASK"} + ), + ), + types.Tool( + name="clickup_search_tasks", + description="Search for tasks by text query.", + inputSchema={ + "type": "object", + "required": ["team_id", "query"], + "properties": { + "team_id": { + "type": "string", + "description": "The ID of the team to search in.", + }, + "query": { + "type": "string", + "description": "The text to search for in task names and descriptions.", + }, + "start": { + "type": "integer", + "description": "Starting position for pagination (default: 0).", + "default": 0, + }, + "limit": { + "type": "integer", + "description": "Maximum number of results to return (default: 20).", + "default": 20, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_TASK", "readOnlyHint": True} + ), + ), + + # Comment tools + types.Tool( + name="clickup_get_comments", + description="Get comments for a specific task.", + inputSchema={ + "type": "object", + "required": ["task_id"], + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to get comments for.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_COMMENT", "readOnlyHint": True} + ), + ), + types.Tool( + name="clickup_create_comment", + description="Create a comment on a task.", + inputSchema={ + "type": "object", + "required": ["task_id", "comment_text"], + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to comment on.", + }, + "comment_text": { + "type": "string", + "description": "The content of the comment.", + }, + "notify_all": { + "type": "boolean", + "description": "Whether to notify all task watchers (default: true).", + "default": True, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_COMMENT"} + ), + ), + types.Tool( + name="clickup_update_comment", + description="Update an existing comment.", + inputSchema={ + "type": "object", + "required": ["comment_id", "comment_text"], + "properties": { + "comment_id": { + "type": "string", + "description": "The ID of the comment to update.", + }, + "comment_text": { + "type": "string", + "description": "The new content of the comment.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_COMMENT"} + ), + ), + + # User tools + types.Tool( + name="clickup_get_user", + description="Get the current user's information.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_USER", "readOnlyHint": True} + ), + ), + types.Tool( + name="clickup_get_team_members", + description="Get all team members.", + inputSchema={ + "type": "object", + "required": ["team_id"], + "properties": { + "team_id": { + "type": "string", + "description": "The ID of the team to get members from.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLICKUP_USER", "readOnlyHint": True} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + try: + if name == "clickup_get_teams": + result = await get_teams() + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_get_workspaces": + result = await get_workspaces() + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_get_spaces": + team_id = arguments.get("team_id") + if not team_id: + return [types.TextContent(type="text", text="Error: team_id parameter is required")] + result = await get_spaces(team_id) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_create_space": + team_id = arguments.get("team_id") + name = arguments.get("name") + if not team_id or not name: + return [types.TextContent(type="text", text="Error: team_id and name parameters are required")] + color = arguments.get("color") + private = arguments.get("private", False) + result = await create_space(team_id, name, color, private) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_update_space": + space_id = arguments.get("space_id") + if not space_id: + return [types.TextContent(type="text", text="Error: space_id parameter is required")] + name = arguments.get("name") + color = arguments.get("color") + private = arguments.get("private") + result = await update_space(space_id, name, color, private) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_get_folders": + space_id = arguments.get("space_id") + if not space_id: + return [types.TextContent(type="text", text="Error: space_id parameter is required")] + result = await get_folders(space_id) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_create_folder": + space_id = arguments.get("space_id") + name = arguments.get("name") + if not space_id or not name: + return [types.TextContent(type="text", text="Error: space_id and name parameters are required")] + result = await create_folder(space_id, name) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_update_folder": + folder_id = arguments.get("folder_id") + name = arguments.get("name") + if not folder_id or not name: + return [types.TextContent(type="text", text="Error: folder_id and name parameters are required")] + result = await update_folder(folder_id, name) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_get_lists": + folder_id = arguments.get("folder_id") + space_id = arguments.get("space_id") + if not folder_id and not space_id: + return [types.TextContent(type="text", text="Error: either folder_id or space_id parameter is required")] + result = await get_lists(folder_id, space_id) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_create_list": + folder_id = arguments.get("folder_id") + space_id = arguments.get("space_id") + name = arguments.get("name") + if not name: + return [types.TextContent(type="text", text="Error: name parameter is required")] + if not folder_id and not space_id: + return [types.TextContent(type="text", text="Error: either folder_id or space_id parameter is required")] + content = arguments.get("content") + due_date = arguments.get("due_date") + priority = arguments.get("priority") + assignee = arguments.get("assignee") + status = arguments.get("status") + result = await create_list(folder_id, space_id, name, content, due_date, priority, assignee, status) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_update_list": + list_id = arguments.get("list_id") + if not list_id: + return [types.TextContent(type="text", text="Error: list_id parameter is required")] + name = arguments.get("name") + content = arguments.get("content") + due_date = arguments.get("due_date") + priority = arguments.get("priority") + assignee = arguments.get("assignee") + unset_status = arguments.get("unset_status", False) + result = await update_list(list_id, name, content, due_date, priority, assignee, unset_status) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_get_tasks": + list_id = arguments.get("list_id") + if not list_id: + return [types.TextContent(type="text", text="Error: list_id parameter is required")] + archived = arguments.get("archived", False) + include_closed = arguments.get("include_closed", False) + page = arguments.get("page", 0) + subtasks = arguments.get("subtasks", False) + result = await get_tasks(list_id, archived, include_closed, page, subtasks=subtasks) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_get_task_by_id": + task_id = arguments.get("task_id") + if not task_id: + return [types.TextContent(type="text", text="Error: task_id parameter is required")] + include_subtasks = arguments.get("include_subtasks", False) + result = await get_task_by_id(task_id, include_subtasks=include_subtasks) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_create_task": + list_id = arguments.get("list_id") + name = arguments.get("name") + if not list_id or not name: + return [types.TextContent(type="text", text="Error: list_id and name parameters are required")] + description = arguments.get("description") + assignees = arguments.get("assignees") + status = arguments.get("status") + priority = arguments.get("priority") + due_date = arguments.get("due_date") + result = await create_task(list_id, name, description, assignees, None, status, priority, due_date) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_update_task": + task_id = arguments.get("task_id") + if not task_id: + return [types.TextContent(type="text", text="Error: task_id parameter is required")] + name = arguments.get("name") + description = arguments.get("description") + status = arguments.get("status") + priority = arguments.get("priority") + due_date = arguments.get("due_date") + result = await update_task(task_id, name, description, status, priority, due_date) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_search_tasks": + team_id = arguments.get("team_id") + query = arguments.get("query") + if not team_id or not query: + return [types.TextContent(type="text", text="Error: team_id and query parameters are required")] + start = arguments.get("start", 0) + limit = arguments.get("limit", 20) + result = await search_tasks(team_id, query, start, limit) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_get_comments": + task_id = arguments.get("task_id") + if not task_id: + return [types.TextContent(type="text", text="Error: task_id parameter is required")] + result = await get_comments(task_id) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_create_comment": + task_id = arguments.get("task_id") + comment_text = arguments.get("comment_text") + if not task_id or not comment_text: + return [types.TextContent(type="text", text="Error: task_id and comment_text parameters are required")] + notify_all = arguments.get("notify_all", True) + result = await create_comment(task_id, comment_text, notify_all=notify_all) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_update_comment": + comment_id = arguments.get("comment_id") + comment_text = arguments.get("comment_text") + if not comment_id or not comment_text: + return [types.TextContent(type="text", text="Error: comment_id and comment_text parameters are required")] + result = await update_comment(comment_id, comment_text) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_get_user": + result = await get_user() + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + elif name == "clickup_get_team_members": + team_id = arguments.get("team_id") + if not team_id: + return [types.TextContent(type="text", text="Error: team_id parameter is required")] + result = await get_team_members(team_id) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + else: + return [types.TextContent(type="text", text=f"Unknown tool: {name}")] + + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract access token from headers + access_token = extract_access_token(request) + + # Set the access token in context for this request + token = auth_token_context.set(access_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract access token from headers + access_token = extract_access_token(scope) + + # Set the access token in context for this request + token = auth_token_context.set(access_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/clickup/tools/__init__.py b/mcp_servers/clickup/tools/__init__.py new file mode 100644 index 00000000..e277b8ad --- /dev/null +++ b/mcp_servers/clickup/tools/__init__.py @@ -0,0 +1,51 @@ +# ClickUp MCP Server Tools +# This package contains all the tool implementations organized by object type + +from .teams import get_teams, get_workspaces +from .spaces import get_spaces, create_space, update_space +from .folders import get_folders, create_folder, update_folder +from .lists import get_lists, create_list, update_list +from .tasks import get_tasks, get_task_by_id, create_task, update_task, search_tasks +from .comments import get_comments, create_comment, update_comment +from .users import get_user, get_team_members +from .base import auth_token_context + +__all__ = [ + # Teams/Workspaces + "get_teams", + "get_workspaces", + + # Spaces + "get_spaces", + "create_space", + "update_space", + + # Folders + "get_folders", + "create_folder", + "update_folder", + + # Lists + "get_lists", + "create_list", + "update_list", + + # Tasks + "get_tasks", + "get_task_by_id", + "create_task", + "update_task", + "search_tasks", + + # Comments + "get_comments", + "create_comment", + "update_comment", + + # Users + "get_user", + "get_team_members", + + # Base + "auth_token_context", +] \ No newline at end of file diff --git a/mcp_servers/clickup/tools/base.py b/mcp_servers/clickup/tools/base.py new file mode 100644 index 00000000..29d9a33a --- /dev/null +++ b/mcp_servers/clickup/tools/base.py @@ -0,0 +1,50 @@ +import logging +from typing import Any, Dict, Optional +from contextvars import ContextVar +import httpx + +# Configure logging +logger = logging.getLogger(__name__) + +CLICKUP_API_BASE_URL = "https://api.clickup.com/api/v2" + +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("Authentication token not found in request context") + +async def make_clickup_request( + endpoint: str, + method: str = "GET", + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """Make a REST API request to ClickUp API.""" + access_token = get_auth_token() + + headers = { + "Authorization": access_token, + "Content-Type": "application/json" + } + + url = f"{CLICKUP_API_BASE_URL}/{endpoint.lstrip('/')}" + + async with httpx.AsyncClient() as client: + if method.upper() == "GET": + response = await client.get(url, headers=headers, params=params) + elif method.upper() == "POST": + response = await client.post(url, headers=headers, json=data, params=params) + elif method.upper() == "PUT": + response = await client.put(url, headers=headers, json=data, params=params) + elif method.upper() == "DELETE": + response = await client.delete(url, headers=headers, params=params) + else: + raise ValueError(f"Unsupported HTTP method: {method}") + + response.raise_for_status() + return response.json() \ No newline at end of file diff --git a/mcp_servers/clickup/tools/comments.py b/mcp_servers/clickup/tools/comments.py new file mode 100644 index 00000000..64396e45 --- /dev/null +++ b/mcp_servers/clickup/tools/comments.py @@ -0,0 +1,74 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_clickup_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_comments(task_id: str, custom_task_ids: bool = False, team_id: Optional[str] = None) -> Dict[str, Any]: + """Get comments for a specific task.""" + logger.info(f"Executing tool: get_comments with task_id: {task_id}") + try: + params = { + "custom_task_ids": str(custom_task_ids).lower() + } + if team_id: + params["team_id"] = team_id + + result = await make_clickup_request(f"task/{task_id}/comment", params=params) + return result + except Exception as e: + logger.exception(f"Error executing tool get_comments: {e}") + raise e + +async def create_comment( + task_id: str, + comment_text: str, + assignee: Optional[str] = None, + notify_all: bool = True, + custom_task_ids: bool = False, + team_id: Optional[str] = None +) -> Dict[str, Any]: + """Create a comment on a task.""" + logger.info(f"Executing tool: create_comment on task: {task_id}") + try: + data = { + "comment_text": comment_text, + "notify_all": notify_all + } + if assignee: + data["assignee"] = assignee + + params = { + "custom_task_ids": str(custom_task_ids).lower() + } + if team_id: + params["team_id"] = team_id + + result = await make_clickup_request(f"task/{task_id}/comment", "POST", data, params) + return result + except Exception as e: + logger.exception(f"Error executing tool create_comment: {e}") + raise e + +async def update_comment( + comment_id: str, + comment_text: str, + assignee: Optional[str] = None, + resolved: Optional[bool] = None +) -> Dict[str, Any]: + """Update an existing comment.""" + logger.info(f"Executing tool: update_comment with comment_id: {comment_id}") + try: + data = {"comment_text": comment_text} + + if assignee: + data["assignee"] = assignee + if resolved is not None: + data["resolved"] = resolved + + result = await make_clickup_request(f"comment/{comment_id}", "PUT", data) + return result + except Exception as e: + logger.exception(f"Error executing tool update_comment: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/clickup/tools/folders.py b/mcp_servers/clickup/tools/folders.py new file mode 100644 index 00000000..36a032ed --- /dev/null +++ b/mcp_servers/clickup/tools/folders.py @@ -0,0 +1,38 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_clickup_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_folders(space_id: str) -> Dict[str, Any]: + """Get all folders in a space.""" + logger.info(f"Executing tool: get_folders with space_id: {space_id}") + try: + result = await make_clickup_request(f"space/{space_id}/folder") + return result + except Exception as e: + logger.exception(f"Error executing tool get_folders: {e}") + raise e + +async def create_folder(space_id: str, name: str) -> Dict[str, Any]: + """Create a new folder in a space.""" + logger.info(f"Executing tool: create_folder with name: {name}") + try: + data = {"name": name} + result = await make_clickup_request(f"space/{space_id}/folder", "POST", data) + return result + except Exception as e: + logger.exception(f"Error executing tool create_folder: {e}") + raise e + +async def update_folder(folder_id: str, name: str) -> Dict[str, Any]: + """Update an existing folder.""" + logger.info(f"Executing tool: update_folder with folder_id: {folder_id}") + try: + data = {"name": name} + result = await make_clickup_request(f"folder/{folder_id}", "PUT", data) + return result + except Exception as e: + logger.exception(f"Error executing tool update_folder: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/clickup/tools/lists.py b/mcp_servers/clickup/tools/lists.py new file mode 100644 index 00000000..a6cefcdf --- /dev/null +++ b/mcp_servers/clickup/tools/lists.py @@ -0,0 +1,89 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_clickup_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_lists(folder_id: Optional[str] = None, space_id: Optional[str] = None) -> Dict[str, Any]: + """Get all lists in a folder or space.""" + logger.info(f"Executing tool: get_lists with folder_id: {folder_id}, space_id: {space_id}") + try: + if folder_id: + result = await make_clickup_request(f"folder/{folder_id}/list") + elif space_id: + result = await make_clickup_request(f"space/{space_id}/list") + else: + raise ValueError("Either folder_id or space_id must be provided") + return result + except Exception as e: + logger.exception(f"Error executing tool get_lists: {e}") + raise e + +async def create_list( + folder_id: Optional[str] = None, + space_id: Optional[str] = None, + name: str = None, + content: Optional[str] = None, + due_date: Optional[str] = None, + priority: Optional[int] = None, + assignee: Optional[str] = None, + status: Optional[str] = None +) -> Dict[str, Any]: + """Create a new list in a folder or space.""" + logger.info(f"Executing tool: create_list with name: {name}") + try: + data = {"name": name} + if content: + data["content"] = content + if due_date: + data["due_date"] = due_date + if priority: + data["priority"] = priority + if assignee: + data["assignee"] = assignee + if status: + data["status"] = status + + if folder_id: + result = await make_clickup_request(f"folder/{folder_id}/list", "POST", data) + elif space_id: + result = await make_clickup_request(f"space/{space_id}/list", "POST", data) + else: + raise ValueError("Either folder_id or space_id must be provided") + return result + except Exception as e: + logger.exception(f"Error executing tool create_list: {e}") + raise e + +async def update_list( + list_id: str, + name: Optional[str] = None, + content: Optional[str] = None, + due_date: Optional[str] = None, + priority: Optional[int] = None, + assignee: Optional[str] = None, + unset_status: bool = False +) -> Dict[str, Any]: + """Update an existing list.""" + logger.info(f"Executing tool: update_list with list_id: {list_id}") + try: + data = {} + if name: + data["name"] = name + if content: + data["content"] = content + if due_date: + data["due_date"] = due_date + if priority: + data["priority"] = priority + if assignee: + data["assignee"] = assignee + if unset_status: + data["unset_status"] = unset_status + + result = await make_clickup_request(f"list/{list_id}", "PUT", data) + return result + except Exception as e: + logger.exception(f"Error executing tool update_list: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/clickup/tools/spaces.py b/mcp_servers/clickup/tools/spaces.py new file mode 100644 index 00000000..97b0c16e --- /dev/null +++ b/mcp_servers/clickup/tools/spaces.py @@ -0,0 +1,51 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_clickup_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_spaces(team_id: str) -> Dict[str, Any]: + """Get all spaces in a team.""" + logger.info(f"Executing tool: get_spaces with team_id: {team_id}") + try: + result = await make_clickup_request(f"team/{team_id}/space") + return result + except Exception as e: + logger.exception(f"Error executing tool get_spaces: {e}") + raise e + +async def create_space(team_id: str, name: str, color: Optional[str] = None, private: bool = False) -> Dict[str, Any]: + """Create a new space in a team.""" + logger.info(f"Executing tool: create_space with name: {name}") + try: + data = { + "name": name, + "private": private + } + if color: + data["color"] = color + + result = await make_clickup_request(f"team/{team_id}/space", "POST", data) + return result + except Exception as e: + logger.exception(f"Error executing tool create_space: {e}") + raise e + +async def update_space(space_id: str, name: Optional[str] = None, color: Optional[str] = None, private: Optional[bool] = None) -> Dict[str, Any]: + """Update an existing space.""" + logger.info(f"Executing tool: update_space with space_id: {space_id}") + try: + data = {} + if name: + data["name"] = name + if color: + data["color"] = color + if private is not None: + data["private"] = private + + result = await make_clickup_request(f"space/{space_id}", "PUT", data) + return result + except Exception as e: + logger.exception(f"Error executing tool update_space: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/clickup/tools/tasks.py b/mcp_servers/clickup/tools/tasks.py new file mode 100644 index 00000000..115d5fc7 --- /dev/null +++ b/mcp_servers/clickup/tools/tasks.py @@ -0,0 +1,226 @@ +import logging +from typing import Any, Dict, Optional, List +from .base import make_clickup_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_tasks( + list_id: str, + archived: bool = False, + include_closed: bool = False, + page: int = 0, + order_by: str = "created", + reverse: bool = False, + subtasks: bool = False, + statuses: Optional[List[str]] = None, + include_markdown_description: bool = False, + assignees: Optional[List[str]] = None, + tags: Optional[List[str]] = None, + due_date_gt: Optional[int] = None, + due_date_lt: Optional[int] = None, + date_created_gt: Optional[int] = None, + date_created_lt: Optional[int] = None, + date_updated_gt: Optional[int] = None, + date_updated_lt: Optional[int] = None +) -> Dict[str, Any]: + """Get tasks from a list with optional filtering.""" + logger.info(f"Executing tool: get_tasks with list_id: {list_id}") + try: + params = { + "archived": str(archived).lower(), + "include_closed": str(include_closed).lower(), + "page": page, + "order_by": order_by, + "reverse": str(reverse).lower(), + "subtasks": str(subtasks).lower(), + "include_markdown_description": str(include_markdown_description).lower() + } + + if statuses: + params["statuses[]"] = statuses + if assignees: + params["assignees[]"] = assignees + if tags: + params["tags[]"] = tags + if due_date_gt: + params["due_date_gt"] = due_date_gt + if due_date_lt: + params["due_date_lt"] = due_date_lt + if date_created_gt: + params["date_created_gt"] = date_created_gt + if date_created_lt: + params["date_created_lt"] = date_created_lt + if date_updated_gt: + params["date_updated_gt"] = date_updated_gt + if date_updated_lt: + params["date_updated_lt"] = date_updated_lt + + result = await make_clickup_request(f"list/{list_id}/task", params=params) + return result + except Exception as e: + logger.exception(f"Error executing tool get_tasks: {e}") + raise e + +async def get_task_by_id(task_id: str, custom_task_ids: bool = False, team_id: Optional[str] = None, include_subtasks: bool = False) -> Dict[str, Any]: + """Get a specific task by ID.""" + logger.info(f"Executing tool: get_task_by_id with task_id: {task_id}") + try: + params = { + "custom_task_ids": str(custom_task_ids).lower(), + "include_subtasks": str(include_subtasks).lower() + } + if team_id: + params["team_id"] = team_id + + result = await make_clickup_request(f"task/{task_id}", params=params) + return result + except Exception as e: + logger.exception(f"Error executing tool get_task_by_id: {e}") + raise e + +async def create_task( + list_id: str, + name: str, + description: Optional[str] = None, + assignees: Optional[List[str]] = None, + tags: Optional[List[str]] = None, + status: Optional[str] = None, + priority: Optional[int] = None, + due_date: Optional[int] = None, + due_date_time: bool = False, + time_estimate: Optional[int] = None, + start_date: Optional[int] = None, + start_date_time: bool = False, + notify_all: bool = True, + parent: Optional[str] = None, + links_to: Optional[str] = None, + check_required_custom_fields: bool = True, + custom_task_ids: bool = False, + team_id: Optional[str] = None +) -> Dict[str, Any]: + """Create a new task.""" + logger.info(f"Executing tool: create_task with name: {name}") + try: + data = {"name": name} + + if description: + data["description"] = description + if assignees: + data["assignees"] = assignees + if tags: + data["tags"] = tags + if status: + data["status"] = status + if priority: + data["priority"] = priority + if due_date: + data["due_date"] = due_date + if due_date_time: + data["due_date_time"] = due_date_time + if time_estimate: + data["time_estimate"] = time_estimate + if start_date: + data["start_date"] = start_date + if start_date_time: + data["start_date_time"] = start_date_time + if notify_all is not None: + data["notify_all"] = notify_all + if parent: + data["parent"] = parent + if links_to: + data["links_to"] = links_to + if check_required_custom_fields is not None: + data["check_required_custom_fields"] = check_required_custom_fields + + params = { + "custom_task_ids": str(custom_task_ids).lower() + } + if team_id: + params["team_id"] = team_id + + result = await make_clickup_request(f"list/{list_id}/task", "POST", data, params) + return result + except Exception as e: + logger.exception(f"Error executing tool create_task: {e}") + raise e + +async def update_task( + task_id: str, + name: Optional[str] = None, + description: Optional[str] = None, + status: Optional[str] = None, + priority: Optional[int] = None, + due_date: Optional[int] = None, + due_date_time: Optional[bool] = None, + parent: Optional[str] = None, + time_estimate: Optional[int] = None, + start_date: Optional[int] = None, + start_date_time: Optional[bool] = None, + assignees: Optional[Dict[str, Any]] = None, + archived: Optional[bool] = None, + custom_task_ids: bool = False, + team_id: Optional[str] = None +) -> Dict[str, Any]: + """Update an existing task.""" + logger.info(f"Executing tool: update_task with task_id: {task_id}") + try: + data = {} + + if name: + data["name"] = name + if description is not None: + data["description"] = description + if status: + data["status"] = status + if priority is not None: + data["priority"] = priority + if due_date is not None: + data["due_date"] = due_date + if due_date_time is not None: + data["due_date_time"] = due_date_time + if parent: + data["parent"] = parent + if time_estimate is not None: + data["time_estimate"] = time_estimate + if start_date is not None: + data["start_date"] = start_date + if start_date_time is not None: + data["start_date_time"] = start_date_time + if assignees: + data["assignees"] = assignees + if archived is not None: + data["archived"] = archived + + params = { + "custom_task_ids": str(custom_task_ids).lower() + } + if team_id: + params["team_id"] = team_id + + result = await make_clickup_request(f"task/{task_id}", "PUT", data, params) + return result + except Exception as e: + logger.exception(f"Error executing tool update_task: {e}") + raise e + +async def search_tasks( + team_id: str, + query: str, + start: int = 0, + limit: int = 20 +) -> Dict[str, Any]: + """Search for tasks by text query.""" + logger.info(f"Executing tool: search_tasks with query: {query}") + try: + params = { + "query": query, + "start": start, + "limit": limit + } + + result = await make_clickup_request(f"team/{team_id}/task", params=params) + return result + except Exception as e: + logger.exception(f"Error executing tool search_tasks: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/clickup/tools/teams.py b/mcp_servers/clickup/tools/teams.py new file mode 100644 index 00000000..8736af88 --- /dev/null +++ b/mcp_servers/clickup/tools/teams.py @@ -0,0 +1,25 @@ +import logging +from typing import Any, Dict +from .base import make_clickup_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_teams() -> Dict[str, Any]: + """Get all teams/workspaces the user has access to.""" + logger.info("Executing tool: get_teams") + try: + result = await make_clickup_request("team") + return result + except Exception as e: + logger.exception(f"Error executing tool get_teams: {e}") + raise e + +async def get_workspaces() -> Dict[str, Any]: + """Get all workspaces (alias for get_teams for consistency).""" + logger.info("Executing tool: get_workspaces") + try: + return await get_teams() + except Exception as e: + logger.exception(f"Error executing tool get_workspaces: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/clickup/tools/users.py b/mcp_servers/clickup/tools/users.py new file mode 100644 index 00000000..c9855cbe --- /dev/null +++ b/mcp_servers/clickup/tools/users.py @@ -0,0 +1,26 @@ +import logging +from typing import Any, Dict +from .base import make_clickup_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_user() -> Dict[str, Any]: + """Get the current user's information.""" + logger.info("Executing tool: get_user") + try: + result = await make_clickup_request("user") + return result + except Exception as e: + logger.exception(f"Error executing tool get_user: {e}") + raise e + +async def get_team_members(team_id: str) -> Dict[str, Any]: + """Get all team members.""" + logger.info(f"Executing tool: get_team_members with team_id: {team_id}") + try: + result = await make_clickup_request(f"team/{team_id}/member") + return result + except Exception as e: + logger.exception(f"Error executing tool get_team_members: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/close/.env.example b/mcp_servers/close/.env.example new file mode 100644 index 00000000..d40a5ad2 --- /dev/null +++ b/mcp_servers/close/.env.example @@ -0,0 +1,2 @@ +# Port for the MCP server to listen on +CLOSE_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/close/Dockerfile b/mcp_servers/close/Dockerfile new file mode 100644 index 00000000..9ae0deaa --- /dev/null +++ b/mcp_servers/close/Dockerfile @@ -0,0 +1,22 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/close/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the server code +COPY mcp_servers/close/server.py . +COPY mcp_servers/close/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/close/README.md b/mcp_servers/close/README.md new file mode 100644 index 00000000..5d799525 --- /dev/null +++ b/mcp_servers/close/README.md @@ -0,0 +1,78 @@ +# Close MCP Server + +A Model Context Protocol (MCP) server for Close CRM integration. Manage sales activities, leads, and opportunities using Close's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Close with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("CLOSE", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/close-mcp-server:latest + + +# Run Close MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/close-mcp-server:latest + + +# Run Close MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_close_api_key_here"}' \ + ghcr.io/klavis-ai/close-mcp-server:latest +``` + +**OAuth Setup:** Close requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Lead Management**: Create, update, and manage sales leads +- **Opportunity Tracking**: Handle sales opportunities and pipeline +- **Activity Management**: Track calls, emails, and sales activities +- **Contact Operations**: Manage contact information and relationships +- **Sales Analytics**: Access sales metrics and performance data + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/close/__init__.py b/mcp_servers/close/__init__.py new file mode 100644 index 00000000..4b9d998e --- /dev/null +++ b/mcp_servers/close/__init__.py @@ -0,0 +1 @@ +# Close CRM MCP Server \ No newline at end of file diff --git a/mcp_servers/close/requirements.txt b/mcp_servers/close/requirements.txt new file mode 100644 index 00000000..a5999950 --- /dev/null +++ b/mcp_servers/close/requirements.txt @@ -0,0 +1,10 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +httpx +click +starlette +asyncio \ No newline at end of file diff --git a/mcp_servers/close/server.py b/mcp_servers/close/server.py new file mode 100644 index 00000000..6cb30b9d --- /dev/null +++ b/mcp_servers/close/server.py @@ -0,0 +1,947 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools.base import CloseToolExecutionError + +# Import tools +from tools import leads as lead_tools +from tools import contacts as contact_tools +from tools import opportunities as opportunity_tools +from tools import tasks as task_tools +from tools import users as user_tools +from tools.base import auth_token_context + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +CLOSE_MCP_SERVER_PORT = int(os.getenv("CLOSE_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +@click.command() +@click.option("--port", default=CLOSE_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + app = Server("close-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # Lead Management Tools + types.Tool( + name="close_create_lead", + description="Create a new lead in Close CRM", + inputSchema={ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the lead/company", + }, + "description": { + "type": "string", + "description": "Description of the lead", + }, + "status_id": { + "type": "string", + "description": "The ID of the lead status", + }, + "url": { + "type": "string", + "description": "Website URL of the lead", + }, + "contacts": { + "type": "array", + "description": "Array of contact objects to create with the lead", + "items": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "title": {"type": "string"}, + "emails": { + "type": "array", + "items": { + "type": "object", + "properties": { + "email": {"type": "string"}, + "type": {"type": "string"} + } + } + }, + "phones": { + "type": "array", + "items": { + "type": "object", + "properties": { + "phone": {"type": "string"}, + "type": {"type": "string"} + } + } + } + } + } + }, + "addresses": { + "type": "array", + "description": "Array of address objects", + "items": { + "type": "object", + "properties": { + "address_1": {"type": "string"}, + "address_2": {"type": "string"}, + "city": {"type": "string"}, + "state": {"type": "string"}, + "zipcode": {"type": "string"}, + "country": {"type": "string"} + } + } + } + }, + "required": ["name"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_LEAD"} + ), + ), + types.Tool( + name="close_get_lead", + description="Get a lead by its ID from Close CRM", + inputSchema={ + "type": "object", + "properties": { + "lead_id": { + "type": "string", + "description": "The ID of the lead to get", + }, + "fields": { + "type": "string", + "description": "Comma-separated list of fields to return", + }, + }, + "required": ["lead_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_LEAD", "readOnlyHint": True} + ), + ), + types.Tool( + name="close_search_leads", + description="Search for leads in Close CRM", + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Search query string", + }, + "limit": { + "type": "integer", + "description": "Maximum number of results to return (1-200, default 25)", + "minimum": 1, + "maximum": 200, + }, + "status_id": { + "type": "string", + "description": "Filter by lead status ID", + }, + }, + "required": ["query"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_LEAD", "readOnlyHint": True} + ), + ), + types.Tool( + name="close_update_lead", + description="Update an existing lead in Close CRM", + inputSchema={ + "type": "object", + "properties": { + "lead_id": { + "type": "string", + "description": "The ID of the lead to update", + }, + "name": { + "type": "string", + "description": "The name of the lead/company", + }, + "description": { + "type": "string", + "description": "Description of the lead", + }, + "status_id": { + "type": "string", + "description": "The ID of the lead status", + }, + "url": { + "type": "string", + "description": "Website URL of the lead", + }, + }, + "required": ["lead_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_LEAD"} + ), + ), + types.Tool( + name="close_delete_lead", + description="Delete a lead from Close CRM", + inputSchema={ + "type": "object", + "properties": { + "lead_id": { + "type": "string", + "description": "The ID of the lead to delete", + }, + }, + "required": ["lead_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_LEAD"} + ), + ), + types.Tool( + name="close_list_leads", + description="List leads from Close CRM", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of results to return (1-200, default 100)", + "minimum": 1, + "maximum": 200, + }, + "skip": { + "type": "integer", + "description": "Number of results to skip for pagination", + "minimum": 0, + }, + "query": { + "type": "string", + "description": "Search query string", + }, + "status_id": { + "type": "string", + "description": "Filter by lead status ID", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_LEAD", "readOnlyHint": True} + ), + ), + + # Contact Management Tools + types.Tool( + name="close_create_contact", + description="Create a new contact in Close CRM", + inputSchema={ + "type": "object", + "properties": { + "lead_id": { + "type": "string", + "description": "The ID of the lead this contact belongs to", + }, + "name": { + "type": "string", + "description": "The name of the contact", + }, + "title": { + "type": "string", + "description": "Job title of the contact", + }, + "emails": { + "type": "array", + "description": "Array of email objects", + "items": { + "type": "object", + "properties": { + "email": {"type": "string"}, + "type": {"type": "string"} + } + } + }, + "phones": { + "type": "array", + "description": "Array of phone objects", + "items": { + "type": "object", + "properties": { + "phone": {"type": "string"}, + "type": {"type": "string"} + } + } + }, + }, + "required": ["lead_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_CONTACT"} + ), + ), + types.Tool( + name="close_get_contact", + description="Get a contact by its ID from Close CRM", + inputSchema={ + "type": "object", + "properties": { + "contact_id": { + "type": "string", + "description": "The ID of the contact to get", + }, + }, + "required": ["contact_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_CONTACT", "readOnlyHint": True} + ), + ), + types.Tool( + name="close_search_contacts", + description="Search for contacts in Close CRM", + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Search query string", + }, + "limit": { + "type": "integer", + "description": "Maximum number of results to return (1-200, default 25)", + "minimum": 1, + "maximum": 200, + }, + "lead_id": { + "type": "string", + "description": "Filter by lead ID", + }, + }, + "required": ["query"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_CONTACT", "readOnlyHint": True} + ), + ), + types.Tool( + name="close_update_contact", + description="Update an existing contact in Close CRM", + inputSchema={ + "type": "object", + "properties": { + "contact_id": { + "type": "string", + "description": "The ID of the contact to update", + }, + "name": { + "type": "string", + "description": "The name of the contact", + }, + "title": { + "type": "string", + "description": "Job title of the contact", + }, + "emails": { + "type": "array", + "description": "Array of email objects", + "items": { + "type": "object", + "properties": { + "email": {"type": "string"}, + "type": {"type": "string"} + } + } + }, + "phones": { + "type": "array", + "description": "Array of phone objects", + "items": { + "type": "object", + "properties": { + "phone": {"type": "string"}, + "type": {"type": "string"} + } + } + }, + }, + "required": ["contact_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_CONTACT"} + ), + ), + types.Tool( + name="close_delete_contact", + description="Delete a contact from Close CRM", + inputSchema={ + "type": "object", + "properties": { + "contact_id": { + "type": "string", + "description": "The ID of the contact to delete", + }, + }, + "required": ["contact_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_CONTACT"} + ), + ), + + # Opportunity Management Tools + types.Tool( + name="close_create_opportunity", + description="Create a new opportunity in Close CRM", + inputSchema={ + "type": "object", + "properties": { + "lead_id": { + "type": "string", + "description": "The ID of the lead this opportunity belongs to", + }, + "note": { + "type": "string", + "description": "Notes about the opportunity", + }, + "confidence": { + "type": "integer", + "description": "Confidence percentage (0-100)", + "minimum": 0, + "maximum": 100, + }, + "value": { + "type": "number", + "description": "Monetary value of the opportunity", + }, + "value_period": { + "type": "string", + "description": "Value period (one_time, monthly, annual, etc.)", + }, + "status_id": { + "type": "string", + "description": "The ID of the opportunity status", + }, + "expected_date": { + "type": "string", + "description": "Expected close date (YYYY-MM-DD format)", + }, + }, + "required": ["lead_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_OPPORTUNITY"} + ), + ), + types.Tool( + name="close_get_opportunity", + description="Get an opportunity by its ID from Close CRM", + inputSchema={ + "type": "object", + "properties": { + "opportunity_id": { + "type": "string", + "description": "The ID of the opportunity to get", + }, + }, + "required": ["opportunity_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_OPPORTUNITY", "readOnlyHint": True} + ), + ), + types.Tool( + name="close_update_opportunity", + description="Update an existing opportunity in Close CRM", + inputSchema={ + "type": "object", + "properties": { + "opportunity_id": { + "type": "string", + "description": "The ID of the opportunity to update", + }, + "note": { + "type": "string", + "description": "Notes about the opportunity", + }, + "confidence": { + "type": "integer", + "description": "Confidence percentage (0-100)", + "minimum": 0, + "maximum": 100, + }, + "value": { + "type": "number", + "description": "Monetary value of the opportunity", + }, + "value_period": { + "type": "string", + "description": "Value period (one_time, monthly, annual, etc.)", + }, + "status_id": { + "type": "string", + "description": "The ID of the opportunity status", + }, + "expected_date": { + "type": "string", + "description": "Expected close date (YYYY-MM-DD format)", + }, + }, + "required": ["opportunity_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_OPPORTUNITY"} + ), + ), + types.Tool( + name="close_delete_opportunity", + description="Delete an opportunity from Close CRM", + inputSchema={ + "type": "object", + "properties": { + "opportunity_id": { + "type": "string", + "description": "The ID of the opportunity to delete", + }, + }, + "required": ["opportunity_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_OPPORTUNITY"} + ), + ), + + # Task Management Tools + types.Tool( + name="close_create_task", + description="Create a new task in Close CRM", + inputSchema={ + "type": "object", + "properties": { + "lead_id": { + "type": "string", + "description": "The ID of the lead this task belongs to", + }, + "text": { + "type": "string", + "description": "Task description/text", + }, + "assigned_to": { + "type": "string", + "description": "User ID to assign the task to", + }, + "date": { + "type": "string", + "description": "Due date for the task (YYYY-MM-DD or YYYY-MM-DDTHH:MM:SS format)", + }, + "is_complete": { + "type": "boolean", + "description": "Whether the task is complete", + }, + }, + "required": ["lead_id", "text"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_TASK"} + ), + ), + types.Tool( + name="close_get_task", + description="Get a task by its ID from Close CRM", + inputSchema={ + "type": "object", + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to get", + }, + }, + "required": ["task_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_TASK", "readOnlyHint": True} + ), + ), + types.Tool( + name="close_update_task", + description="Update an existing task in Close CRM", + inputSchema={ + "type": "object", + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to update", + }, + "text": { + "type": "string", + "description": "Task description/text", + }, + "assigned_to": { + "type": "string", + "description": "User ID to assign the task to", + }, + "date": { + "type": "string", + "description": "Due date for the task (YYYY-MM-DD or YYYY-MM-DDTHH:MM:SS format)", + }, + "is_complete": { + "type": "boolean", + "description": "Whether the task is complete", + }, + }, + "required": ["task_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_TASK"} + ), + ), + types.Tool( + name="close_delete_task", + description="Delete a task from Close CRM", + inputSchema={ + "type": "object", + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to delete", + }, + }, + "required": ["task_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_TASK"} + ), + ), + types.Tool( + name="close_list_tasks", + description="List tasks from Close CRM", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of results to return (1-200, default 100)", + "minimum": 1, + "maximum": 200, + }, + "skip": { + "type": "integer", + "description": "Number of results to skip for pagination", + "minimum": 0, + }, + "lead_id": { + "type": "string", + "description": "Filter by lead ID", + }, + "assigned_to": { + "type": "string", + "description": "Filter by assigned user ID", + }, + "is_complete": { + "type": "boolean", + "description": "Filter by completion status", + }, + "task_type": { + "type": "string", + "description": "Filter by task type (lead, incoming_email, etc.)", + }, + "view": { + "type": "string", + "description": "View filter (inbox, future, archive)", + "enum": ["inbox", "future", "archive"] + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_TASK", "readOnlyHint": True} + ), + ), + + # User Management Tools + types.Tool( + name="close_get_current_user", + description="Get information about the current user", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_USER", "readOnlyHint": True} + ), + ), + types.Tool( + name="close_list_users", + description="List users from Close CRM", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of results to return (1-200, default 100)", + "minimum": 1, + "maximum": 200, + }, + "skip": { + "type": "integer", + "description": "Number of results to skip for pagination", + "minimum": 0, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_USER", "readOnlyHint": True} + ), + ), + types.Tool( + name="close_get_user", + description="Get a user by their ID from Close CRM", + inputSchema={ + "type": "object", + "properties": { + "user_id": { + "type": "string", + "description": "The ID of the user to get", + }, + }, + "required": ["user_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CLOSE_USER", "readOnlyHint": True} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + logger.info(f"Calling tool: {name} with arguments: {arguments}") + + try: + if name == "close_create_lead": + result = await lead_tools.create_lead(**arguments) + elif name == "close_get_lead": + result = await lead_tools.get_lead(**arguments) + elif name == "close_search_leads": + result = await lead_tools.search_leads(**arguments) + elif name == "close_update_lead": + result = await lead_tools.update_lead(**arguments) + elif name == "close_delete_lead": + result = await lead_tools.delete_lead(**arguments) + elif name == "close_list_leads": + result = await lead_tools.list_leads(**arguments) + + elif name == "close_create_contact": + result = await contact_tools.create_contact(**arguments) + elif name == "close_get_contact": + result = await contact_tools.get_contact(**arguments) + elif name == "close_search_contacts": + result = await contact_tools.search_contacts(**arguments) + elif name == "close_update_contact": + result = await contact_tools.update_contact(**arguments) + elif name == "close_delete_contact": + result = await contact_tools.delete_contact(**arguments) + + elif name == "close_create_opportunity": + result = await opportunity_tools.create_opportunity(**arguments) + elif name == "close_get_opportunity": + result = await opportunity_tools.get_opportunity(**arguments) + elif name == "close_update_opportunity": + result = await opportunity_tools.update_opportunity(**arguments) + elif name == "close_delete_opportunity": + result = await opportunity_tools.delete_opportunity(**arguments) + + elif name == "close_create_task": + result = await task_tools.create_task(**arguments) + elif name == "close_get_task": + result = await task_tools.get_task(**arguments) + elif name == "close_update_task": + result = await task_tools.update_task(**arguments) + elif name == "close_delete_task": + result = await task_tools.delete_task(**arguments) + elif name == "close_list_tasks": + result = await task_tools.list_tasks(**arguments) + + elif name == "close_get_current_user": + result = await user_tools.get_current_user() + elif name == "close_list_users": + result = await user_tools.list_users(**arguments) + elif name == "close_get_user": + result = await user_tools.get_user(**arguments) + else: + raise ValueError(f"Unknown tool: {name}") + + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + except CloseToolExecutionError as e: + logger.error(f"Close CRM error in {name}: {e}") + error_response = { + "error": str(e), + "developer_message": getattr(e, "developer_message", ""), + } + return [types.TextContent(type="text", text=json.dumps(error_response, indent=2))] + except Exception as e: + logger.exception(f"Unexpected error in tool {name}") + error_response = { + "error": f"Unexpected error: {str(e)}", + "developer_message": f"Unexpected error in tool {name}: {type(e).__name__}: {str(e)}", + } + return [types.TextContent(type="text", text=json.dumps(error_response, indent=2))] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + try: + uvicorn.run( + starlette_app, + host="0.0.0.0", + port=port, + log_level=log_level.lower(), + ) + return 0 + except Exception as e: + logger.exception(f"Failed to start server: {e}") + return 1 + +if __name__ == "__main__": + exit(main()) \ No newline at end of file diff --git a/mcp_servers/close/tools/__init__.py b/mcp_servers/close/tools/__init__.py new file mode 100644 index 00000000..71289299 --- /dev/null +++ b/mcp_servers/close/tools/__init__.py @@ -0,0 +1,6 @@ +# Tools for Close CRM MCP Server +from . import leads +from . import contacts +from . import opportunities +from . import tasks +from . import users \ No newline at end of file diff --git a/mcp_servers/close/tools/base.py b/mcp_servers/close/tools/base.py new file mode 100644 index 00000000..bd44db25 --- /dev/null +++ b/mcp_servers/close/tools/base.py @@ -0,0 +1,339 @@ +import asyncio +import json +from dataclasses import dataclass +import logging +from typing import Any, Dict, Optional, cast +from contextvars import ContextVar +from functools import wraps + +import httpx + +from .constants import CLOSE_API_VERSION, CLOSE_BASE_URL, CLOSE_MAX_CONCURRENT_REQUESTS, CLOSE_MAX_TIMEOUT_SECONDS + +# Configure logging +logger = logging.getLogger(__name__) + +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +# Type definitions +ToolResponse = dict[str, Any] + +# Exception classes +class ToolExecutionError(Exception): + def __init__(self, message: str, developer_message: str = ""): + super().__init__(message) + self.developer_message = developer_message + + +class CloseToolExecutionError(ToolExecutionError): + pass + + +class PaginationTimeoutError(CloseToolExecutionError): + def __init__(self, timeout_seconds: int, tool_name: str): + message = f"Pagination timed out after {timeout_seconds} seconds" + super().__init__( + message=message, + developer_message=f"{message} while calling the tool {tool_name}", + ) + + +class RetryableToolError(Exception): + def __init__(self, message: str, additional_prompt_content: str = "", retry_after_ms: int = 1000, developer_message: str = ""): + super().__init__(message) + self.additional_prompt_content = additional_prompt_content + self.retry_after_ms = retry_after_ms + self.developer_message = developer_message + + +# Utility functions +def remove_none_values(data: dict[str, Any]) -> dict[str, Any]: + return {k: v for k, v in data.items() if v is not None} + + +def format_currency_from_cents(amount_cents: Optional[int], currency: str = "USD") -> Optional[str]: + """Convert cents to formatted currency string.""" + if amount_cents is None: + return None + amount_dollars = amount_cents / 100 + if currency == "USD": + return f"${amount_dollars:,.2f}" + return f"{amount_dollars:,.2f} {currency}" + + +def format_opportunity_values(opportunity: dict[str, Any]) -> dict[str, Any]: + """Format opportunity monetary values from cents to readable currency strings.""" + formatted_opp = opportunity.copy() + + # List of fields that contain monetary values in cents + money_fields = ['value', 'expected_value', 'annualized_value', 'annualized_expected_value'] + + for field in money_fields: + if field in formatted_opp and formatted_opp[field] is not None: + # Store original value with _cents suffix for reference + formatted_opp[f"{field}_cents"] = formatted_opp[field] + # Replace with formatted dollar amount + currency = formatted_opp.get('value_currency', 'USD') + formatted_opp[field] = format_currency_from_cents(formatted_opp[field], currency) + + return formatted_opp + + +def format_leads_response(response: dict[str, Any]) -> dict[str, Any]: + """Format lead response to convert opportunity values from cents to dollars.""" + formatted_response = response.copy() + + if 'leads' in formatted_response: + formatted_leads = [] + for lead in formatted_response['leads']: + formatted_lead = lead.copy() + if 'opportunities' in formatted_lead: + formatted_opportunities = [] + for opp in formatted_lead['opportunities']: + formatted_opportunities.append(format_opportunity_values(opp)) + formatted_lead['opportunities'] = formatted_opportunities + formatted_leads.append(formatted_lead) + formatted_response['leads'] = formatted_leads + + return formatted_response + + +def get_next_page(response: dict[str, Any]) -> dict[str, Any]: + """Extract next page information from response.""" + has_more = response.get("has_more", False) + next_cursor = response.get("next_cursor") + return { + "has_more": has_more, + "next_cursor": next_cursor + } + + +# Decorator function to clean Close response +def clean_close_response(func): + def response_cleaner(data: dict[str, Any]) -> dict[str, Any]: + # Close API uses 'id' natively, no need to convert like Asana's 'gid' + # But we can clean up other response format inconsistencies if needed + + for k, v in data.items(): + if isinstance(v, dict): + data[k] = response_cleaner(v) + elif isinstance(v, list): + data[k] = [ + item if not isinstance(item, dict) else response_cleaner(item) for item in v + ] + + return data + + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + response = await func(*args, **kwargs) + return response_cleaner(response) + + return wrapper + + +@dataclass +class CloseClient: + access_token: str + base_url: str = CLOSE_BASE_URL + api_version: str = CLOSE_API_VERSION + max_concurrent_requests: int = CLOSE_MAX_CONCURRENT_REQUESTS + _semaphore: asyncio.Semaphore | None = None + + def __post_init__(self) -> None: + self._semaphore = self._semaphore or asyncio.Semaphore(self.max_concurrent_requests) + + def _build_url(https://github.com/self,%20endpoint:%20str,%20api_version:%20str%20|%20None%20=%20None) -> str: + api_version = api_version or self.api_version + return f"{self.base_url.rstrip('/')}/{api_version.strip('/')}/{endpoint.lstrip('/')}" + + def _build_auth_header(self) -> str: + """Create Bearer Auth header for Close API.""" + return f"Bearer {self.access_token}" + + def _build_error_messages(self, response: httpx.Response) -> tuple[str, str]: + try: + data = response.json() + + if "error" in data: + error_message = data["error"] + developer_message = f"{error_message} (HTTP status code: {response.status_code})" + elif "errors" in data: + errors = data["errors"] + if len(errors) == 1: + error_message = errors[0] + developer_message = f"{error_message} (HTTP status code: {response.status_code})" + else: + errors_concat = "', '".join(errors) + error_message = f"Multiple errors occurred: '{errors_concat}'" + developer_message = f"Multiple errors occurred: {json.dumps(errors)} (HTTP status code: {response.status_code})" + else: + error_message = f"HTTP {response.status_code} error" + developer_message = f"HTTP {response.status_code} error: {response.text}" + + except Exception as e: + error_message = "Failed to parse Close error response" + developer_message = f"Failed to parse Close error response: {type(e).__name__}: {e!s}" + + return error_message, developer_message + + def _raise_for_status(self, response: httpx.Response) -> None: + if response.status_code < 300: + return + + error_message, developer_message = self._build_error_messages(response) + + raise CloseToolExecutionError(error_message, developer_message) + + def _set_request_body(self, kwargs: dict, data: dict | None, json_data: dict | None) -> dict: + if data and json_data: + raise ValueError("Cannot provide both data and json_data") + + if data: + kwargs["data"] = data + + elif json_data: + kwargs["json"] = json_data + + return kwargs + + @clean_close_response + async def get( + self, + endpoint: str, + params: Optional[dict] = None, + headers: Optional[dict] = None, + api_version: str | None = None, + ) -> dict: + default_headers = { + "Authorization": self._build_auth_header(), + "Accept": "application/json", + } + headers = {**default_headers, **(headers or {})} + + kwargs = { + "url": self._build_url(https://github.com/endpoint,%20api_version), + "headers": headers, + "timeout": CLOSE_MAX_TIMEOUT_SECONDS, + } + + if params: + kwargs["params"] = params + + async with self._semaphore, httpx.AsyncClient() as client: # type: ignore[union-attr] + response = await client.get(**kwargs) # type: ignore[arg-type] + self._raise_for_status(response) + return cast(dict, response.json()) + + @clean_close_response + async def post( + self, + endpoint: str, + data: Optional[dict] = None, + json_data: Optional[dict] = None, + files: Optional[dict] = None, + headers: Optional[dict] = None, + api_version: str | None = None, + ) -> dict: + default_headers = { + "Authorization": self._build_auth_header(), + "Accept": "application/json", + } + + if files is None and json_data is not None: + default_headers["Content-Type"] = "application/json" + + headers = {**default_headers, **(headers or {})} + + kwargs = { + "url": self._build_url(https://github.com/endpoint,%20api_version), + "headers": headers, + "timeout": CLOSE_MAX_TIMEOUT_SECONDS, + } + + if files is not None: + kwargs["files"] = files + if data is not None: + kwargs["data"] = data + else: + kwargs = self._set_request_body(kwargs, data, json_data) + + async with self._semaphore, httpx.AsyncClient() as client: # type: ignore[union-attr] + response = await client.post(**kwargs) # type: ignore[arg-type] + self._raise_for_status(response) + return cast(dict, response.json()) + + @clean_close_response + async def put( + self, + endpoint: str, + data: Optional[dict] = None, + json_data: Optional[dict] = None, + headers: Optional[dict] = None, + api_version: str | None = None, + ) -> dict: + headers = headers or {} + headers["Authorization"] = self._build_auth_header() + headers["Content-Type"] = "application/json" + headers["Accept"] = "application/json" + + kwargs = { + "url": self._build_url(https://github.com/endpoint,%20api_version), + "headers": headers, + "timeout": CLOSE_MAX_TIMEOUT_SECONDS, + } + + kwargs = self._set_request_body(kwargs, data, json_data) + + async with self._semaphore, httpx.AsyncClient() as client: # type: ignore[union-attr] + response = await client.put(**kwargs) # type: ignore[arg-type] + self._raise_for_status(response) + return cast(dict, response.json()) + + @clean_close_response + async def delete( + self, + endpoint: str, + headers: Optional[dict] = None, + api_version: str | None = None, + ) -> dict: + headers = headers or {} + headers["Authorization"] = self._build_auth_header() + headers["Accept"] = "application/json" + + kwargs = { + "url": self._build_url(https://github.com/endpoint,%20api_version), + "headers": headers, + "timeout": CLOSE_MAX_TIMEOUT_SECONDS, + } + + async with self._semaphore, httpx.AsyncClient() as client: # type: ignore[union-attr] + response = await client.delete(**kwargs) # type: ignore[arg-type] + self._raise_for_status(response) + + # Some DELETE responses may be empty + if response.text: + return cast(dict, response.json()) + return {} + + async def get_current_user(self) -> dict: + response = await self.get("/me/") + return cast(dict, response) + + +def get_close_client() -> CloseClient: + access_token = get_auth_token() + logger.info(f"Access Token: {access_token}") + return CloseClient(access_token=access_token) + + +def get_auth_token() -> str: + """Get the Close access token from the current context.""" + try: + return auth_token_context.get() + except LookupError: + raise CloseToolExecutionError( + "Authentication required. Please provide a Close access token.", + "No Close access token found in request context. The access token should be provided in the Authorization header." + ) \ No newline at end of file diff --git a/mcp_servers/close/tools/constants.py b/mcp_servers/close/tools/constants.py new file mode 100644 index 00000000..91b56f89 --- /dev/null +++ b/mcp_servers/close/tools/constants.py @@ -0,0 +1,55 @@ +# Constants for Close CRM API + +# API Configuration +CLOSE_API_VERSION = "v1" +CLOSE_BASE_URL = "https://api.close.com/api" +CLOSE_MAX_CONCURRENT_REQUESTS = 10 +CLOSE_MAX_TIMEOUT_SECONDS = 120 + +# Common field limits +CLOSE_MAX_LIMIT = 200 + +# Lead Status Types (common ones) +class LeadStatus: + LEAD = "lead" + QUALIFIED = "qualified" + CUSTOMER = "customer" + CANCELLED = "cancelled" + POTENTIAL = "potential" + +# Task Types +class TaskType: + LEAD = "lead" + INCOMING_EMAIL = "incoming_email" + EMAIL_FOLLOWUP = "email_followup" + MISSED_CALL = "missed_call" + VOICEMAIL = "voicemail" + OPPORTUNITY_DUE = "opportunity_due" + INCOMING_SMS = "incoming_sms" + +# Sort Orders +class SortOrder: + ASC = "asc" + DESC = "desc" + +# Object Types for Activities +class ActivityType: + CALL = "call" + EMAIL = "email" + EMAIL_THREAD = "emailthread" + NOTE = "note" + SMS = "sms" + MEETING = "meeting" + CREATED = "created" + +# Opportunity Status Types (common ones) +class OpportunityStatus: + ACTIVE = "active" + WON = "won" + LOST = "lost" + +# Contact Field Types +class ContactFieldType: + PHONE = "phone" + EMAIL = "email" + URL = "url" \ No newline at end of file diff --git a/mcp_servers/close/tools/contacts.py b/mcp_servers/close/tools/contacts.py new file mode 100644 index 00000000..3c2733d1 --- /dev/null +++ b/mcp_servers/close/tools/contacts.py @@ -0,0 +1,140 @@ +import logging +from typing import Any, Dict, List, Optional + +from .base import ( + CloseToolExecutionError, + ToolResponse, + get_close_client, + remove_none_values, +) +from .constants import CLOSE_MAX_LIMIT + +logger = logging.getLogger(__name__) + + +async def list_contacts( + limit: Optional[int] = None, + skip: Optional[int] = None, + lead_id: Optional[str] = None, + query: Optional[str] = None, + **kwargs +) -> ToolResponse: + """List contacts from Close CRM.""" + + client = get_close_client() + + params = remove_none_values({ + "_limit": min(limit, CLOSE_MAX_LIMIT) if limit else 100, + "_skip": skip, + "lead_id": lead_id, + "query": query, + }) + + response = await client.get("/contact/", params=params) + + return { + "contacts": response.get("data", []), + "has_more": response.get("has_more", False), + "total_results": response.get("total_results"), + } + + +async def get_contact(contact_id: str) -> ToolResponse: + """Get a specific contact by ID.""" + + client = get_close_client() + + response = await client.get(f"/contact/{contact_id}/") + + return response + + +async def create_contact( + lead_id: str, + name: Optional[str] = None, + title: Optional[str] = None, + emails: Optional[List[Dict[str, Any]]] = None, + phones: Optional[List[Dict[str, Any]]] = None, + urls: Optional[List[Dict[str, Any]]] = None, + **custom_fields +) -> ToolResponse: + """Create a new contact in Close CRM.""" + + client = get_close_client() + + contact_data = remove_none_values({ + "lead_id": lead_id, + "name": name, + "title": title, + "emails": emails, + "phones": phones, + "urls": urls, + **custom_fields + }) + + response = await client.post("/contact/", json_data=contact_data) + + return response + + +async def update_contact( + contact_id: str, + name: Optional[str] = None, + title: Optional[str] = None, + emails: Optional[List[Dict[str, Any]]] = None, + phones: Optional[List[Dict[str, Any]]] = None, + urls: Optional[List[Dict[str, Any]]] = None, + **custom_fields +) -> ToolResponse: + """Update an existing contact.""" + + client = get_close_client() + + contact_data = remove_none_values({ + "name": name, + "title": title, + "emails": emails, + "phones": phones, + "urls": urls, + **custom_fields + }) + + if not contact_data: + raise CloseToolExecutionError("No update data provided") + + response = await client.put(f"/contact/{contact_id}/", json_data=contact_data) + + return response + + +async def delete_contact(contact_id: str) -> ToolResponse: + """Delete a contact.""" + + client = get_close_client() + + response = await client.delete(f"/contact/{contact_id}/") + + return {"success": True, "contact_id": contact_id} + + +async def search_contacts( + query: str, + limit: Optional[int] = None, + **kwargs +) -> ToolResponse: + """Search for contacts using Close CRM search.""" + + client = get_close_client() + + params = remove_none_values({ + "query": query, + "_limit": min(limit, CLOSE_MAX_LIMIT) if limit else 25, + }) + + response = await client.get("/contact/", params=params) + + return { + "contacts": response.get("data", []), + "has_more": response.get("has_more", False), + "total_results": response.get("total_results"), + } \ No newline at end of file diff --git a/mcp_servers/close/tools/leads.py b/mcp_servers/close/tools/leads.py new file mode 100644 index 00000000..6acc0d82 --- /dev/null +++ b/mcp_servers/close/tools/leads.py @@ -0,0 +1,173 @@ +import logging +from typing import Any, Dict, List, Optional + +from .base import ( + CloseToolExecutionError, + ToolResponse, + get_close_client, + remove_none_values, + format_leads_response, +) +from .constants import CLOSE_MAX_LIMIT + +logger = logging.getLogger(__name__) + + +async def list_leads( + limit: Optional[int] = None, + skip: Optional[int] = None, + query: Optional[str] = None, + status_id: Optional[str] = None, + **kwargs +) -> ToolResponse: + """List leads from Close CRM.""" + + client = get_close_client() + + params = remove_none_values({ + "_limit": min(limit, CLOSE_MAX_LIMIT) if limit else 100, + "_skip": skip, + "query": query, + "status_id": status_id, + }) + + response = await client.get("/lead/", params=params) + + result = { + "leads": response.get("data", []), + "has_more": response.get("has_more", False), + "total_results": response.get("total_results"), + } + + return format_leads_response(result) + + +async def get_lead(lead_id: str, fields: Optional[str] = None) -> ToolResponse: + """Get a specific lead by ID.""" + + client = get_close_client() + + params = remove_none_values({ + "_fields": fields, + }) + + response = await client.get(f"/lead/{lead_id}/", params=params) + + # Format opportunities if they exist in the lead + if 'opportunities' in response: + result = {"leads": [response]} + formatted = format_leads_response(result) + return formatted["leads"][0] + + return response + + +async def create_lead( + name: str, + description: Optional[str] = None, + status_id: Optional[str] = None, + contacts: Optional[List[Dict[str, Any]]] = None, + addresses: Optional[List[Dict[str, Any]]] = None, + url: Optional[str] = None, + **custom_fields +) -> ToolResponse: + """Create a new lead in Close CRM.""" + + client = get_close_client() + + lead_data = remove_none_values({ + "name": name, + "description": description, + "status_id": status_id, + "contacts": contacts, + "addresses": addresses, + "url": url, + **custom_fields + }) + + response = await client.post("/lead/", json_data=lead_data) + + return response + + +async def update_lead( + lead_id: str, + name: Optional[str] = None, + description: Optional[str] = None, + status_id: Optional[str] = None, + url: Optional[str] = None, + **custom_fields +) -> ToolResponse: + """Update an existing lead.""" + + client = get_close_client() + + lead_data = remove_none_values({ + "name": name, + "description": description, + "status_id": status_id, + "url": url, + **custom_fields + }) + + if not lead_data: + raise CloseToolExecutionError("No update data provided") + + response = await client.put(f"/lead/{lead_id}/", json_data=lead_data) + + return response + + +async def delete_lead(lead_id: str) -> ToolResponse: + """Delete a lead.""" + + client = get_close_client() + + response = await client.delete(f"/lead/{lead_id}/") + + return {"success": True, "lead_id": lead_id} + + +async def search_leads( + query: str, + limit: Optional[int] = None, + fields: Optional[str] = None, + **kwargs +) -> ToolResponse: + """Search for leads using Close CRM search.""" + + client = get_close_client() + + params = remove_none_values({ + "query": query, + "_limit": min(limit, CLOSE_MAX_LIMIT) if limit else 25, + "_fields": fields, + }) + + response = await client.get("/lead/", params=params) + + result = { + "leads": response.get("data", []), + "has_more": response.get("has_more", False), + "total_results": response.get("total_results"), + } + + return format_leads_response(result) + + +async def merge_leads( + source_lead_id: str, + destination_lead_id: str +) -> ToolResponse: + """Merge two leads.""" + + client = get_close_client() + + merge_data = { + "source": source_lead_id, + "destination": destination_lead_id, + } + + response = await client.post("/lead/merge/", json_data=merge_data) + + return response \ No newline at end of file diff --git a/mcp_servers/close/tools/opportunities.py b/mcp_servers/close/tools/opportunities.py new file mode 100644 index 00000000..8208bb59 --- /dev/null +++ b/mcp_servers/close/tools/opportunities.py @@ -0,0 +1,155 @@ +import logging +from typing import Any, Dict, List, Optional + +from .base import ( + CloseToolExecutionError, + ToolResponse, + get_close_client, + remove_none_values, + format_opportunity_values, +) +from .constants import CLOSE_MAX_LIMIT + +logger = logging.getLogger(__name__) + + +async def list_opportunities( + limit: Optional[int] = None, + skip: Optional[int] = None, + lead_id: Optional[str] = None, + status_id: Optional[str] = None, + **kwargs +) -> ToolResponse: + """List opportunities from Close CRM.""" + + client = get_close_client() + + params = remove_none_values({ + "_limit": min(limit, CLOSE_MAX_LIMIT) if limit else 100, + "_skip": skip, + "lead_id": lead_id, + "status_id": status_id, + }) + + response = await client.get("/opportunity/", params=params) + + # Format monetary values in opportunities + formatted_opportunities = [] + for opp in response.get("data", []): + formatted_opportunities.append(format_opportunity_values(opp)) + + return { + "opportunities": formatted_opportunities, + "has_more": response.get("has_more", False), + "total_results": response.get("total_results"), + } + + +async def get_opportunity(opportunity_id: str) -> ToolResponse: + """Get a specific opportunity by ID.""" + + client = get_close_client() + + response = await client.get(f"/opportunity/{opportunity_id}/") + + return format_opportunity_values(response) + + +async def create_opportunity( + lead_id: str, + note: Optional[str] = None, + confidence: Optional[int] = None, + value: Optional[float] = None, + value_period: Optional[str] = None, + status_id: Optional[str] = None, + expected_date: Optional[str] = None, + **custom_fields +) -> ToolResponse: + """Create a new opportunity in Close CRM.""" + + client = get_close_client() + + opportunity_data = remove_none_values({ + "lead_id": lead_id, + "note": note, + "confidence": confidence, + "value": value, + "value_period": value_period, + "status_id": status_id, + "expected_date": expected_date, + **custom_fields + }) + + response = await client.post("/opportunity/", json_data=opportunity_data) + + return format_opportunity_values(response) + + +async def update_opportunity( + opportunity_id: str, + note: Optional[str] = None, + confidence: Optional[int] = None, + value: Optional[float] = None, + value_period: Optional[str] = None, + status_id: Optional[str] = None, + expected_date: Optional[str] = None, + **custom_fields +) -> ToolResponse: + """Update an existing opportunity.""" + + client = get_close_client() + + opportunity_data = remove_none_values({ + "note": note, + "confidence": confidence, + "value": value, + "value_period": value_period, + "status_id": status_id, + "expected_date": expected_date, + **custom_fields + }) + + if not opportunity_data: + raise CloseToolExecutionError("No update data provided") + + response = await client.put(f"/opportunity/{opportunity_id}/", json_data=opportunity_data) + + return format_opportunity_values(response) + + +async def delete_opportunity(opportunity_id: str) -> ToolResponse: + """Delete an opportunity.""" + + client = get_close_client() + + response = await client.delete(f"/opportunity/{opportunity_id}/") + + return {"success": True, "opportunity_id": opportunity_id} + + +async def search_opportunities( + query: str, + limit: Optional[int] = None, + **kwargs +) -> ToolResponse: + """Search for opportunities using Close CRM search.""" + + client = get_close_client() + + params = remove_none_values({ + "query": query, + "_limit": min(limit, CLOSE_MAX_LIMIT) if limit else 25, + }) + + response = await client.get("/opportunity/", params=params) + + # Format monetary values in opportunities + formatted_opportunities = [] + for opp in response.get("data", []): + formatted_opportunities.append(format_opportunity_values(opp)) + + return { + "opportunities": formatted_opportunities, + "has_more": response.get("has_more", False), + "total_results": response.get("total_results"), + } \ No newline at end of file diff --git a/mcp_servers/close/tools/tasks.py b/mcp_servers/close/tools/tasks.py new file mode 100644 index 00000000..038f3fbc --- /dev/null +++ b/mcp_servers/close/tools/tasks.py @@ -0,0 +1,172 @@ +import logging +from typing import Any, Dict, List, Optional + +from .base import ( + CloseToolExecutionError, + ToolResponse, + get_close_client, + remove_none_values, +) +from .constants import CLOSE_MAX_LIMIT, TaskType + +logger = logging.getLogger(__name__) + + +async def list_tasks( + limit: Optional[int] = None, + skip: Optional[int] = None, + lead_id: Optional[str] = None, + assigned_to: Optional[str] = None, + is_complete: Optional[bool] = None, + task_type: Optional[str] = None, + view: Optional[str] = None, + **kwargs +) -> ToolResponse: + """List tasks from Close CRM.""" + + client = get_close_client() + + params = remove_none_values({ + "_limit": min(limit, CLOSE_MAX_LIMIT) if limit else 100, + "_skip": skip, + "lead_id": lead_id, + "assigned_to": assigned_to, + "is_complete": is_complete, + "_type": task_type, + "view": view, + }) + + response = await client.get("/task/", params=params) + + return { + "tasks": response.get("data", []), + "has_more": response.get("has_more", False), + "total_results": response.get("total_results"), + } + + +async def get_task(task_id: str) -> ToolResponse: + """Get a specific task by ID.""" + + client = get_close_client() + + response = await client.get(f"/task/{task_id}/") + + return response + + +async def create_task( + lead_id: str, + text: str, + assigned_to: Optional[str] = None, + date: Optional[str] = None, + is_complete: Optional[bool] = None, + **kwargs +) -> ToolResponse: + """Create a new task in Close CRM.""" + + client = get_close_client() + + task_data = remove_none_values({ + "lead_id": lead_id, + "text": text, + "assigned_to": assigned_to, + "date": date, + "is_complete": is_complete or False, + }) + + response = await client.post("/task/", json_data=task_data) + + return response + + +async def update_task( + task_id: str, + text: Optional[str] = None, + assigned_to: Optional[str] = None, + date: Optional[str] = None, + is_complete: Optional[bool] = None, + **kwargs +) -> ToolResponse: + """Update an existing task.""" + + client = get_close_client() + + task_data = remove_none_values({ + "text": text, + "assigned_to": assigned_to, + "date": date, + "is_complete": is_complete, + }) + + if not task_data: + raise CloseToolExecutionError("No update data provided") + + response = await client.put(f"/task/{task_id}/", json_data=task_data) + + return response + + +async def delete_task(task_id: str) -> ToolResponse: + """Delete a task.""" + + client = get_close_client() + + response = await client.delete(f"/task/{task_id}/") + + return {"success": True, "task_id": task_id} + + +async def bulk_update_tasks( + task_ids: List[str], + assigned_to: Optional[str] = None, + date: Optional[str] = None, + is_complete: Optional[bool] = None, + **kwargs +) -> ToolResponse: + """Bulk update multiple tasks.""" + + client = get_close_client() + + task_data = remove_none_values({ + "assigned_to": assigned_to, + "date": date, + "is_complete": is_complete, + }) + + if not task_data: + raise CloseToolExecutionError("No update data provided") + + # Use the id__in filter for bulk operations + params = { + "id__in": ",".join(task_ids) + } + + response = await client.put("/task/", params=params, json_data=task_data) + + return response + + +async def search_tasks( + query: str, + limit: Optional[int] = None, + task_type: Optional[str] = None, + **kwargs +) -> ToolResponse: + """Search for tasks using Close CRM search.""" + + client = get_close_client() + + params = remove_none_values({ + "query": query, + "_limit": min(limit, CLOSE_MAX_LIMIT) if limit else 25, + "_type": task_type, + }) + + response = await client.get("/task/", params=params) + + return { + "tasks": response.get("data", []), + "has_more": response.get("has_more", False), + "total_results": response.get("total_results"), + } \ No newline at end of file diff --git a/mcp_servers/close/tools/users.py b/mcp_servers/close/tools/users.py new file mode 100644 index 00000000..64e56b0c --- /dev/null +++ b/mcp_servers/close/tools/users.py @@ -0,0 +1,78 @@ +import logging +from typing import Any, Dict, List, Optional + +from .base import ( + CloseToolExecutionError, + ToolResponse, + get_close_client, + remove_none_values, +) +from .constants import CLOSE_MAX_LIMIT + +logger = logging.getLogger(__name__) + + +async def get_current_user() -> ToolResponse: + """Get the current user information.""" + + client = get_close_client() + + response = await client.get_current_user() + + return response + + +async def list_users( + limit: Optional[int] = None, + skip: Optional[int] = None, + **kwargs +) -> ToolResponse: + """List users from Close CRM.""" + + client = get_close_client() + + params = remove_none_values({ + "_limit": min(limit, CLOSE_MAX_LIMIT) if limit else 100, + "_skip": skip, + }) + + response = await client.get("/user/", params=params) + + return { + "users": response.get("data", []), + "has_more": response.get("has_more", False), + "total_results": response.get("total_results"), + } + + +async def get_user(user_id: str) -> ToolResponse: + """Get a specific user by ID.""" + + client = get_close_client() + + response = await client.get(f"/user/{user_id}/") + + return response + + +async def search_users( + query: str, + limit: Optional[int] = None, + **kwargs +) -> ToolResponse: + """Search for users using Close CRM search.""" + + client = get_close_client() + + params = remove_none_values({ + "query": query, + "_limit": min(limit, CLOSE_MAX_LIMIT) if limit else 25, + }) + + response = await client.get("/user/", params=params) + + return { + "users": response.get("data", []), + "has_more": response.get("has_more", False), + "total_results": response.get("total_results"), + } \ No newline at end of file diff --git a/mcp_servers/coinbase/.env.example b/mcp_servers/coinbase/.env.example new file mode 100644 index 00000000..9b52ba3a --- /dev/null +++ b/mcp_servers/coinbase/.env.example @@ -0,0 +1,5 @@ +COINBASE_API_KEY=YOUR_API_KEY +COINBASE_API_SECRET="YOUR_API_SECRET" +COINBASE_MCP_SERVER_PORT=5000 +COINBASE_API_BASE_URL=https://api.coinbase.com/v2 +COINBASE_EXCHANGE_URL="https://api.exchange.coinbase.com/%20No%20newline%20at%20end%20of%20filediff%20--git%20a/mcp_servers/coinbase/Dockerfile%20b/mcp_servers/coinbase/Dockerfilenew%20file%20mode%20100644index%2000000000..0940a4e8---%20/dev/null+++%20b/mcp_servers/coinbase/Dockerfile@@%20-0,0%20+1,23%20@@+FROM%20python:3.12-slim++WORKDIR%20/app++#%20Install%20system%20dependencies+RUN%20apt-get%20update%20&&%20apt-get%20install%20-y%20--no-install-recommends%20\+%20%20%20%20gcc%20\+%20%20%20%20&&%20rm%20-rf%20/var/lib/apt/lists/*++#%20Copy%20only%20the%20requirements%20first%20to%20leverage%20Docker%20cache+COPY%20mcp_servers/coinbase/requirements.txt%20.+RUN%20pip%20install%20--no-cache-dir%20-r%20requirements.txt++#%20Copy%20the%20server%20code+COPY%20mcp_servers/coinbase/server.py%20.+COPY%20mcp_servers/coinbase/tools/%20./tools/+COPY%20mcp_servers/coinbase/utils/%20./utils/++#%20Expose%20the%20port%20the%20server%20runs%20on+EXPOSE%205000++#%20Command%20to%20run%20the%20server+CMD%20["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/coinbase/README.md b/mcp_servers/coinbase/README.md new file mode 100644 index 00000000..cfb4f386 --- /dev/null +++ b/mcp_servers/coinbase/README.md @@ -0,0 +1,78 @@ +# Coinbase MCP Server + +A Model Context Protocol (MCP) server for Coinbase integration. Manage cryptocurrency accounts, transactions, and trading using Coinbase's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Coinbase with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("COINBASE", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/coinbase-mcp-server:latest + + +# Run Coinbase MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/coinbase-mcp-server:latest + + +# Run Coinbase MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_coinbase_access_token_here"}' \ + ghcr.io/klavis-ai/coinbase-mcp-server:latest +``` + +**OAuth Setup:** Coinbase requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Account Management**: View cryptocurrency account balances and details +- **Transaction History**: Access transaction records and payment history +- **Price Data**: Get real-time cryptocurrency prices and market data +- **Wallet Operations**: Manage cryptocurrency wallets and addresses +- **Trading Info**: Access trading information and market insights + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/coinbase/requirements.txt b/mcp_servers/coinbase/requirements.txt new file mode 100644 index 00000000..1abe4bda --- /dev/null +++ b/mcp_servers/coinbase/requirements.txt @@ -0,0 +1,7 @@ +mcp[cli]==1.11.0 +aiohttp +python-dotenv==1.0.0 +uvicorn[standard] +starlette +click +coinbase-advanced-py \ No newline at end of file diff --git a/mcp_servers/coinbase/server.py b/mcp_servers/coinbase/server.py new file mode 100644 index 00000000..1c9f4cf6 --- /dev/null +++ b/mcp_servers/coinbase/server.py @@ -0,0 +1,612 @@ +import contextlib +import base64 +import json +import logging +import os + +from collections.abc import AsyncIterator +from typing import List + +import click +import mcp.types as types + +from dotenv import load_dotenv +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +from tools import ( + auth_token_context, + coinbase_get_prices, + coinbase_get_current_exchange_rate, + coinbase_get_accounts, + coinbase_get_account_balance, + coinbase_get_transactions, + coinbase_get_portfolio_value, + coinbase_get_product_details, + coinbase_get_historical_prices, +) + +# Configure logging +logger = logging.getLogger(__name__) + + +load_dotenv() + +COINBASE_MCP_SERVER_PORT = int(os.getenv("COINBASE_MCP_SERVER_PORT", "5000")) + +def extract_api_key(request_or_scope) -> str: + """Extract API key from headers or environment.""" + api_key = os.getenv("API_KEY") + + if not api_key: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + else: + auth_data = None + + if auth_data: + try: + # Parse the JSON auth data to extract token + auth_json = json.loads(auth_data) + api_key = auth_json.get('token') or auth_json.get('api_key') or '' + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + api_key = "" + + return api_key or "" + + +@click.command() +@click.option( + "--port", + default=COINBASE_MCP_SERVER_PORT, + help="Port to listen on for HTTP" +) +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # MCP server instance + app = Server("coinbase-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # Market Data Tools (Coinbase APP) + types.Tool( + name="coinbase_get_prices", + description=""" + Get current prices for specific cryptocurrencies. + Rate limited: Conservative rate limits apply. + + Typical use: get current prices for specific cryptocurrencies. + """, + inputSchema={ + "type": "object", + "properties": { + "symbols": { + "type": "array", + "items": {"type": "string"}, + "description": "Required. List of cryptocurrency symbols in BASE-QUOTE format(e.g., ['BTC-USD', 'ETH-USD']). Must be valid Coinbase trading pairs." + } + }, + "required": ["symbols"] + }, + annotations=types.ToolAnnotations( + **{"category": "COINBASE_MARKET", "readOnlyHint": True} + ), + ), + types.Tool( + name="coinbase_get_current_exchange_rate", + description=""" + Get current exchange rate for specific cryptocurrencies. + Rate limited: Conservative rate limits apply. + + Typical use: get current exchange rate for specific cryptocurrencies. + """, + inputSchema={ + "type": "object", + "properties": { + "symbols": { + "type": "array", + "items": {"type": "string"}, + "description": "Required. List of cryptocurrency symbols (e.g., ['BTC', 'ETH'])." + } + }, + "required": ["symbols"] + }, + annotations=types.ToolAnnotations( + **{"category": "COINBASE_MARKET", "readOnlyHint": True} + ), + ), + types.Tool( + name="coinbase_get_historical_prices", + description=""" + Get historical price data for cryptocurrencies. + Rate limited: Conservative rate limits apply. + + Typical use: get price history with different timeframes for analysis and charts. + Returns OHLCV (Open, High, Low, Close, Volume) candlestick data. + """, + inputSchema={ + "type": "object", + "properties": { + "symbol": { + "type": "string", + "description": "Required. Trading pair symbol (e.g., 'BTC-USD')." + }, + "start": { + "type": "string", + "description": "Required. Start time in ISO 8601 format (e.g., '2024-01-01T00:00:00Z')." + }, + "end": { + "type": "string", + "description": "Required. End time in ISO 8601 format (e.g., '2024-12-31T23:59:59Z')." + }, + "granularity": { + "type": "integer", + "description": "Candle granularity in seconds. Options: 60, 300, 900, 3600, 21600, 86400." + } + }, + "required": ["symbol", "start", "end"] + }, + annotations=types.ToolAnnotations( + **{"category": "COINBASE_MARKET", "readOnlyHint": True} + ), + ), + # Account & Portfolio Tools (requires API key) + types.Tool( + name="coinbase_get_accounts", + description=""" + List user's cryptocurrency accounts. + Rate limited: Conservative rate limits for account endpoints. + + Requires Coinbase API authentication. + """, + inputSchema={ + "type": "object", + "properties": {} + }, + annotations=types.ToolAnnotations( + **{"category": "COINBASE_ACCOUNT", "readOnlyHint": True} + ), + ), + types.Tool( + name="coinbase_get_account_balance", + description=""" + Get balance for a specific account. + Rate limited: Conservative rate limits for account endpoints. + + Requires Coinbase API authentication. + """, + inputSchema={ + "type": "object", + "properties": { + "account_id": { + "type": "string", + "description": "Required. The account ID to get balance for." + } + }, + "required": ["account_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "COINBASE_ACCOUNT", "readOnlyHint": True} + ), + ), + types.Tool( + name="coinbase_get_transactions", + description=""" + Get transaction history for an account. + Pagination: Use 'before' and 'after' cursors for navigation. + Rate limited: Conservative rate limits for account endpoints. + + Requires Coinbase API authentication. + """, + inputSchema={ + "type": "object", + "properties": { + "account_id": { + "type": "string", + "description": "Required. The account ID to get transactions for." + }, + "limit": { + "type": "integer", + "description": "Number of transactions to return (max 100, default 25)." + }, + "before": { + "type": "string", + "description": "Optional. Return transactions before this cursor." + }, + "after": { + "type": "string", + "description": "Optional. Return transactions after this cursor." + } + }, + "required": ["account_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "COINBASE_ACCOUNT", "readOnlyHint": True} + ), + ), + types.Tool( + name="coinbase_get_portfolio_value", + description=""" + Get total portfolio value across all accounts. + + Requires Coinbase API authentication. + """, + inputSchema={ + "type": "object", + "properties": {} + }, + annotations=types.ToolAnnotations( + **{"category": "COINBASE_ACCOUNT", "readOnlyHint": True} + ), + ), + # Product Information Tools (Cryptocurreny) + types.Tool( + name="coinbase_get_product_details", + description=""" + Get detailed cryptocurrency information. + + Typical use: get comprehensive details about a specific cryptocurrency. + """, + inputSchema={ + "type": "object", + "properties": { + "product_id": { + "type": "string", + "description": "Required. The product ID (e.g., 'BTC-USD')." + } + }, + "required": ["product_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "COINBASE_MARKET", "readOnlyHint": True} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + logger.info(f"Calling tool: {name} with arguments: {arguments}") + + if name == "coinbase_get_prices": + try: + symbols = arguments.get("symbols") + + if not symbols: + return [ + types.TextContent( + type="text", + text="Missing required parameters. Required: symbols.", + ) + ] + + result = await coinbase_get_prices(symbols) + + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "coinbase_get_current_exchange_rate": + try: + symbols = arguments.get("symbols") + + if not symbols: + return [ + types.TextContent( + type="text", + text="Missing required parameters. Required: symbols.", + ) + ] + + result = await coinbase_get_current_exchange_rate(symbols) + + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "coinbase_get_historical_prices": + try: + symbol = arguments.get("symbol") + start = arguments.get("start") + end = arguments.get("end") + granularity = arguments.get("granularity") + + if not all([symbol, start, end]): + return [ + types.TextContent( + type="text", + text="Missing required parameters. Required: symbol, start, end.", + ) + ] + + result = await coinbase_get_historical_prices( + symbol, + start, + end, + granularity + ) + + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "coinbase_get_accounts": + try: + result = await coinbase_get_accounts() + + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "coinbase_get_account_balance": + try: + account_id = arguments.get("account_id") + + if not account_id: + return [ + types.TextContent( + type="text", + text="Missing required parameters. Required: account_id.", + ) + ] + + result = await coinbase_get_account_balance(account_id) + + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "coinbase_get_transactions": + try: + account_id = arguments.get("account_id") + + if not account_id: + return [ + types.TextContent( + type="text", + text="Missing required parameters. Required: account_id.", + ) + ] + + result = await coinbase_get_transactions(account_id) + + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "coinbase_get_portfolio_value": + try: + result = await coinbase_get_portfolio_value() + + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "coinbase_get_product_details": + try: + product_id = arguments.get("product_id") + + if not product_id: + return [ + types.TextContent( + type="text", + text="Missing required parameters. Required: product_id.", + ) + ] + + result = await coinbase_get_product_details(product_id) + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + else: + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract API key from headers + api_key = extract_api_key(request) + + # Set the API key in context for this request + token = auth_token_context.set(api_key) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract API key from headers + api_key = extract_api_key(scope) + + # Set the API key in context for this request + token = auth_token_context.set(api_key) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + + +if __name__ == "__main__": + main() diff --git a/mcp_servers/coinbase/tools/__init__.py b/mcp_servers/coinbase/tools/__init__.py new file mode 100644 index 00000000..3d26e431 --- /dev/null +++ b/mcp_servers/coinbase/tools/__init__.py @@ -0,0 +1,32 @@ +from .base import ( + auth_token_context +) + +from .market_data import ( + coinbase_get_prices, + coinbase_get_current_exchange_rate, +) + +from .accounts import ( + coinbase_get_accounts, + coinbase_get_account_balance, + coinbase_get_transactions, + coinbase_get_portfolio_value +) + +from .products import ( + coinbase_get_product_details, + coinbase_get_historical_prices, +) + +__all__ = [ + "auth_token_context", + "coinbase_get_prices", + "coinbase_get_current_exchange_rate", + "coinbase_get_accounts", + "coinbase_get_account_balance", + "coinbase_get_transactions", + "coinbase_get_portfolio_value", + "coinbase_get_product_details", + "coinbase_get_historical_prices", +] diff --git a/mcp_servers/coinbase/tools/accounts.py b/mcp_servers/coinbase/tools/accounts.py new file mode 100644 index 00000000..ebf209f6 --- /dev/null +++ b/mcp_servers/coinbase/tools/accounts.py @@ -0,0 +1,125 @@ +import logging + +from typing import Any, Dict, Optional + +from .base import make_coinbase_request +from utils.rate_limiter import rate_limited + +# Configure logging +logger = logging.getLogger(__name__) + + +@rate_limited(api_type="accounts") +async def coinbase_get_accounts() -> Dict[str, Any]: + """ + List user's cryptocurrency accounts. + Uses conservative rate limits for authenticated account endpoints. + + Requires Coinbase API authentication. + Returns: + dict: JSON response with user accounts. + """ + endpoint = "/v2/accounts" + return await make_coinbase_request(method="GET", endpoint=endpoint) + + +@rate_limited(api_type="accounts") +async def coinbase_get_account_balance(account_id: str) -> Dict[str, Any]: + """ + Get balance for a specific account. + Uses conservative rate limits for authenticated account endpoints. + + Args: + account_id (str): The account ID to get balance for. + Returns: + dict: JSON response with account balance. + """ + endpoint = f"/v2/accounts/{account_id}" + + return await make_coinbase_request(method="GET", endpoint=endpoint) + + +@rate_limited(api_type="accounts") +async def coinbase_get_transactions( + account_id: str, + limit: Optional[int] = 25, + before: Optional[str] = None, + after: Optional[str] = None +) -> Dict[str, Any]: + """ + Get transaction history for an account. + Uses conservative rate limits for authenticated account endpoints. + + Args: + account_id (str): The account ID to get transactions for. + limit (int): Number of transactions to return (max 100, default 25). + before (str): Optional. Return transactions before this cursor. + after (str): Optional. Return transactions after this cursor. + Returns: + dict: JSON response with transaction history. + """ + # Base endpoint for JWT token generation (without query params) + endpoint = f"/v2/accounts/{account_id}/transactions" + + # Build query parameters list + params = [] + if limit: + params.append(f"limit={limit}") + if before: + params.append(f"before={before}") + if after: + params.append(f"after={after}") + + return await make_coinbase_request( + method="GET", + endpoint=endpoint, + query_params=params + ) + + +@rate_limited(api_type="accounts") +async def coinbase_get_portfolio_value() -> Dict[str, Any]: + """ + Get total portfolio value across all accounts. + Uses conservative rate limits for authenticated account endpoints. + + Requires Coinbase API authentication. + Returns: + dict: JSON response with portfolio value. + """ + try: + # First get all accounts + accounts_response = await coinbase_get_accounts() + if "error" in accounts_response: + return accounts_response + + accounts = accounts_response.get("data", []) + account_values = [] + + for account in accounts: + account_id = account.get("id") + + if account_id: + balance_response = await coinbase_get_account_balance(account_id) + + if "error" not in balance_response: + balance_data = balance_response.get("data", {}) + account_values.append({ + "account_id": account_id, + "currency": account.get("currency"), + "balance": balance_data.get("balance"), + }) + + return { + "data": { + "accounts": account_values, + "total_accounts": len(accounts) + } + } + except Exception as e: + logger.error( + f"Unexpected error in Coinbase portfolio value request: {e}" + ) + return { + "error": f"Unexpected error in Coinbase portfolio value request: {str(e)}" + } diff --git a/mcp_servers/coinbase/tools/base.py b/mcp_servers/coinbase/tools/base.py new file mode 100644 index 00000000..4a6bde88 --- /dev/null +++ b/mcp_servers/coinbase/tools/base.py @@ -0,0 +1,206 @@ +import logging +import os + +from contextvars import ContextVar +from typing import Any, Dict, List, Optional + +import aiohttp + +from coinbase import jwt_generator +from dotenv import load_dotenv + +# Load env vars from .env +load_dotenv() + +logger = logging.getLogger(__name__) + + +class CoinbaseValidationError(Exception): + """Custom exception for Airtable 422 validation errors.""" + + pass + + +# Context variable to store the auth token per request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + + +def get_auth_token() -> str: + """ + Get the Coinbase API token from context or fallback to env. + """ + try: + token = auth_token_context.get() + + if not token: + # Fallback to environment variable + token = os.getenv("COINBASE_API_KEY") + + if not token: + raise RuntimeError( + "No Coinbase auth token found in context or environment" + ) + + return token + except LookupError: + # Context variable not set at all + token = os.getenv("COINBASE_API_KEY") + + if not token: + raise RuntimeError( + "No Coinbase auth token found in context or environment" + ) + + return token + + +def generate_jwt_token(request_method: str, request_path: str) -> str: + """ + Generate JWT token for Coinbase API authentication using coinbase-advanced-py. + JWT tokens expire after 2 minutes and must be generated for each unique API request. + """ + try: + api_key = get_auth_token() + api_secret = os.getenv("COINBASE_API_SECRET") + + if not api_secret: + raise RuntimeError("No Coinbase API secret found") + + # Generate JWT token using coinbase-advanced-py library + jwt_uri = jwt_generator.format_jwt_uri(request_method, request_path) + + jwt_token = jwt_generator.build_rest_jwt( + jwt_uri, + api_key, + api_secret + ) + + return jwt_token + + except Exception as e: + logger.error(f"Failed to generate JWT token: {e}") + raise CoinbaseValidationError( + f"Failed to generate JWT token: {e}. Check your API key signature algorithm is ECDSA." + ) + + +def _get_coinbase_headers(request_method: str, request_path: str) -> Dict[str, str]: + """ + Get headers with JWT authentication for Coinbase API. + Generates a fresh JWT token for each request as required by Coinbase. + """ + try: + jwt_token = generate_jwt_token(request_method, request_path) + + return { + "Authorization": f"Bearer {jwt_token}", + "Content-Type": "application/json" + } + except CoinbaseValidationError as e: + raise e + except Exception as e: + logger.error(f"Failed to generate authenticated headers: {e}") + raise e + + +def get_coinbase_config() -> Optional[Dict[str, str]]: + """ + Return a Coinbase client config ready to use. + """ + try: + base_url = os.getenv("COINBASE_API_BASE_URL") + exchange_url = os.getenv("COINBASE_EXCHANGE_URL") + + if not base_url or not exchange_url: + raise RuntimeError( + "Missing Coinbase configuration: You need to set COINBASE_API_BASE_URL and COINBASE_EXCHANGE_URL environment variables." + ) + + return { + "base_url": base_url, + "exchange_url": exchange_url + } + except Exception as e: + logger.error(f"Failed to initialize Coinbase config: {e}") + raise RuntimeError(f"Failed to initialize Coinbase config: {e}") + + +async def make_coinbase_request( + method: str, + endpoint: str, + require_auth: bool = True, + query_params: Optional[List[str]] = None, + json_data: Optional[Dict[str, Any]] = None, + base_url: Optional[str] = None +) -> Dict[str, Any]: + """ + Make a centralized Coinbase API request with proper authentication and error handling. + Rate limiting is automatically applied via decorator. + + Args: + method (str): HTTP method (GET, POST, PUT, DELETE) + endpoint (str): API endpoint path (e.g., "/v2/accounts") - used for JWT generation + require_auth (bool): Whether to include JWT authentication (default: True) + query_params (Optional[List[str]]): List of query parameter strings (e.g., ["limit=5", "before=cursor"]) + json_data (Optional[Dict]): JSON data for POST/PUT requests + base_url (Optional[str]): Custom base URL to use instead of default (default: None) + + Returns: + Dict[str, Any]: API response or error dict + """ + try: + config = get_coinbase_config() + + # Set up headers based on authentication requirement + if require_auth: + # Generate authenticated headers using the base endpoint + headers = _get_coinbase_headers(method, endpoint) + else: + # Public endpoint - no authentication needed + headers = {"Content-Type": "application/json"} + + # Build full URL + if query_params: + endpoint += "?" + "&".join(query_params) + + # Choose the appropriate base URL + if base_url: + # Use custom base URL + url = f"{base_url}{endpoint}" + else: + # Use default base URL from config + url = f"{config['base_url']}{endpoint}" + + logger.info(f"Request: {method} {url}") + + async with aiohttp.ClientSession(headers=headers) as session: + try: + async with session.request(method, url, json=json_data) as response: + response.raise_for_status() + return await response.json() + except aiohttp.ClientResponseError as e: + logger.error( + f"Coinbase API request failed: {e.status} {e.message} for {method} {url}" + ) + return {"error": f"API request failed: {str(e)}"} + except Exception as e: + logger.error( + f"Unexpected error occurred during Coinbase API request: {e}" + ) + return {"error": f"Unexpected error: {str(e)}"} + except CoinbaseValidationError as e: + raise e + except aiohttp.ClientResponseError as e: + logger.error( + f"Coinbase API request failed: {e.status} {e.message} for {method} {url}" + ) + raise RuntimeError( + f"Coinbase API Error ({e.status}): {e.message}" + ) from e + except Exception as e: + logger.error( + f"An unexpected error occurred during Coinbase API request: {e}" + ) + raise RuntimeError( + f"Unexpected error during API call to {method} {url}" + ) from e diff --git a/mcp_servers/coinbase/tools/constants.py b/mcp_servers/coinbase/tools/constants.py new file mode 100644 index 00000000..3867fecf --- /dev/null +++ b/mcp_servers/coinbase/tools/constants.py @@ -0,0 +1,8 @@ +COINBASE_DEFAULT_RATE_LIMIT = 2 +COINBASE_MARKET_DATA_RATE_LIMIT = 2 +COINBASE_ACCOUNTS_RATE_LIMIT = 2 +COINBASE_PRODUCTS_RATE_LIMIT = 2 +COINBASE_MAX_RETRY_ATTEMPTS = 3 +COINBASE_INITIAL_DELAY = 1.0 +COINBASE_MAX_DELAY = 10.0 +COINBASE_BACKOFF_FACTOR = 2.0 diff --git a/mcp_servers/coinbase/tools/market_data.py b/mcp_servers/coinbase/tools/market_data.py new file mode 100644 index 00000000..8884b5cb --- /dev/null +++ b/mcp_servers/coinbase/tools/market_data.py @@ -0,0 +1,92 @@ +import logging + +from typing import Any, Dict, List + +from .base import make_coinbase_request +from utils.rate_limiter import rate_limited + +# Configure logging +logger = logging.getLogger(__name__) + + +@rate_limited(api_type="market_data") +async def coinbase_get_prices(symbols: List[str]) -> Dict[str, Any]: + """ + Get current prices for cryptocurrencies. + Uses higher rate limits for market data endpoints. + + Args: + symbols (List[str]): List of cryptocurrency symbols (e.g., ['BTC-USD', 'ETH-USD']). + Returns: + dict: JSON response with current prices. + """ + # Get prices for specific symbols + prices_data = [] + + for symbol in symbols: + try: + endpoint = f"/v2/prices/{symbol}/spot" + + result = await make_coinbase_request( + method="GET", + endpoint=endpoint, + require_auth=False + ) + + if "error" in result: + prices_data.append( + {"error": f"Could not get price for {symbol}: {result['error']}"} + ) + else: + prices_data.append(result) + except Exception as e: + logger.error(f"Coinbase price request failed for {symbol}: {e}") + prices_data.append( + {"error": f"Could not get price for {symbol}: {str(e)}"} + ) + + return {"data": prices_data} + + +@rate_limited(api_type="market_data") +async def coinbase_get_current_exchange_rate(symbols: List[str]) -> Dict[str, Any]: + """ + Get current exchange rate for a cryptocurrencies. + Uses higher rate limits for market data endpoints. + + Args: + symbols (List[str]): List[str]): List of cryptocurrency symbols (e.g., ['BTC-USD', 'ETH-USD']). + Returns: + dict: JSON response with current exchange rate. + """ + # Get exchange rates for specific symbols + exchange_rates_data = [] + + for symbol in symbols: + try: + endpoint = "/v2/exchange-rates" + query_params = [f"currency={symbol}"] + + result = await make_coinbase_request( + method="GET", + endpoint=endpoint, + query_params=query_params, + require_auth=False + ) + + if "error" in result: + exchange_rates_data.append( + {"error": f"Could not get exchange rate for {symbol}: {result['error']}"} + ) + else: + exchange_rates_data.append(result) + + except Exception as e: + logger.error( + f"Coinbase exchange rate request failed for {symbol}: {e}" + ) + exchange_rates_data.append( + {"error": f"Could not get exchange rate for {symbol}: {str(e)}"} + ) + + return {"data": exchange_rates_data} diff --git a/mcp_servers/coinbase/tools/products.py b/mcp_servers/coinbase/tools/products.py new file mode 100644 index 00000000..2f6071ca --- /dev/null +++ b/mcp_servers/coinbase/tools/products.py @@ -0,0 +1,94 @@ +import logging +import os + +from typing import Any, Dict, Optional + +from .base import make_coinbase_request +from utils.rate_limiter import rate_limited + +# Configure logging +logger = logging.getLogger(__name__) + +EXCHANGE_URL = os.getenv("COINBASE_EXCHANGE_URL") + + +@rate_limited(api_type="products") +async def coinbase_get_product_details(product_id: str) -> Dict[str, Any]: + """ + Get detailed product information. + Uses moderate rate limits for product information endpoints. + + Args: + product_id (str): The product ID (e.g., 'BTC-USD'). + Returns: + dict: JSON response with detailed product information. + """ + try: + # Check if exchange URL is configured + if not EXCHANGE_URL: + return {"error": "Exchange URL not configured"} + + # Use centralized request function with custom base URL (no auth required) + result = await make_coinbase_request( + method="GET", + endpoint=f"/products/{product_id}", + require_auth=False, + base_url=EXCHANGE_URL + ) + + return result + + except Exception as e: + logger.error(f"Coinbase product details request failed: {e}") + return { + "error": f"Could not get Coinbase product details for {product_id}: {str(e)}" + } + + +@rate_limited(api_type="products") +async def coinbase_get_historical_prices( + symbol: str, + start: str, + end: str, + granularity: Optional[int] = 3600 +) -> Dict[str, Any]: + """ + Get historical price data for cryptocurrencies. + Uses moderate rate limits for product information endpoints. + + Args: + symbol (str): Trading pair symbol (e.g., 'BTC-USD'). + start (str): Start time in ISO 8601 format (e.g., '2024-01-01T00:00:00Z'). + end (str): End time in ISO 8601 format (e.g., '2024-12-31T23:59:59Z'). + granularity (int): Candle granularity in seconds. Options: 60, 300, 900, 3600, 21600, 86400. + Returns: + dict: JSON response with historical price data. + """ + try: + # Check if exchange URL is configured + if not EXCHANGE_URL: + return {"error": "Exchange URL not configured"} + + # Build query parameters + params = [ + f"start={start}", + f"end={end}", + f"granularity={granularity}" + ] + + # Use centralized request function with custom base URL (no auth required) + result = await make_coinbase_request( + method="GET", + endpoint=f"/products/{symbol}/candles", + query_params=params, + require_auth=False, + base_url=EXCHANGE_URL + ) + + return result + + except Exception as e: + logger.error(f"Coinbase historical prices request failed: {e}") + return { + "error": f"Could not get Coinbase historical prices for {symbol}: {str(e)}" + } diff --git a/mcp_servers/coinbase/utils/__init__.py b/mcp_servers/coinbase/utils/__init__.py new file mode 100644 index 00000000..ed63e5ea --- /dev/null +++ b/mcp_servers/coinbase/utils/__init__.py @@ -0,0 +1,5 @@ +from .rate_limiter import rate_limited + +__all__ = [ + "rate_limited", +] \ No newline at end of file diff --git a/mcp_servers/coinbase/utils/rate_limiter.py b/mcp_servers/coinbase/utils/rate_limiter.py new file mode 100644 index 00000000..9069a07f --- /dev/null +++ b/mcp_servers/coinbase/utils/rate_limiter.py @@ -0,0 +1,307 @@ +import asyncio +import functools +import logging +import time + +from typing import Any, Callable, Optional +from contextlib import asynccontextmanager + +from tools.constants import ( + COINBASE_DEFAULT_RATE_LIMIT, + COINBASE_MARKET_DATA_RATE_LIMIT, + COINBASE_ACCOUNTS_RATE_LIMIT, + COINBASE_PRODUCTS_RATE_LIMIT, + COINBASE_MAX_RETRY_ATTEMPTS, + COINBASE_INITIAL_DELAY, + COINBASE_MAX_DELAY, + COINBASE_BACKOFF_FACTOR, +) + +logger = logging.getLogger(__name__) + + +_rate_limiters = {} + + +class RateLimitConfig: + """Configuration class for rate limiting settings.""" + + def __init__(self): + # Default rate limit for Coinbase API + self.default_max_requests_per_second = COINBASE_DEFAULT_RATE_LIMIT + + # API-specific rate limits with smart defaults + # Market API: 10,000 per hour = 2.78 per second, rounded down to 2 for safety + self.market_data_rate_limit = COINBASE_MARKET_DATA_RATE_LIMIT + + # Accounts API: 10,000 per hour = 2.78 per second, rounded down to 2 for safety + self.accounts_rate_limit = COINBASE_ACCOUNTS_RATE_LIMIT + + # Products API: 10 per second + self.products_rate_limit = COINBASE_PRODUCTS_RATE_LIMIT + + # Retry settings + self.max_retry_attempts = COINBASE_MAX_RETRY_ATTEMPTS + self.initial_delay = COINBASE_INITIAL_DELAY + self.max_delay = COINBASE_MAX_DELAY + self.backoff_factor = COINBASE_BACKOFF_FACTOR + + +class TokenBucketRateLimiter: + """ + Token Bucket Rate Limiter Implementation + + The token bucket algorithm allows for a burst of traffic up to the bucket capacity, + while maintaining a steady rate of token refill. + """ + + def __init__( + self, + tokens_per_second: int, + bucket_capacity: Optional[int] = None + ): + """ + Initialize token bucket rate limiter. + + Args: + tokens_per_second: Rate at which tokens are refilled + bucket_capacity: Maximum number of tokens in bucket (defaults to tokens_per_second) + """ + self.tokens_per_second = tokens_per_second + self.bucket_capacity = bucket_capacity or tokens_per_second + self.tokens = self.bucket_capacity # Start with full bucket + self.last_refill_time = time.time() + + def _refill_tokens(self): + """Refill tokens based on time elapsed since last refill.""" + current_time = time.time() + time_elapsed = current_time - self.last_refill_time + + # Calculate tokens to add + tokens_to_add = time_elapsed * self.tokens_per_second + + # Update tokens (don't exceed capacity) + self.tokens = min(self.bucket_capacity, self.tokens + tokens_to_add) + self.last_refill_time = current_time + + def try_consume_token(self) -> bool: + """ + Try to consume a token from the bucket. + + Returns: + True if token was consumed, False if bucket is empty + """ + self._refill_tokens() + + if self.tokens >= 1: + self.tokens -= 1 + return True + return False + + def get_wait_time(self) -> float: + """ + Calculate how long to wait before a token will be available. + + Returns: + Time in seconds to wait + """ + self._refill_tokens() + + if self.tokens >= 1: + return 0.0 + + # Calculate time until next token is available + tokens_needed = 1 - self.tokens + wait_time = tokens_needed / self.tokens_per_second + + return max(wait_time, 0.1) # Minimum 0.1 second wait + + +class RateLimiter: + def __init__( + self, + max_requests_per_second: Optional[int] = None + ): + self.max_requests_per_second = max_requests_per_second or config.default_max_requests_per_second + self.token_bucket = TokenBucketRateLimiter( + self.max_requests_per_second + ) + + def _is_rate_limited(self) -> bool: + """Check if we're currently rate limited using token bucket.""" + return not self.token_bucket.try_consume_token() + + def _calculate_wait_time(self) -> float: + """Calculate how long to wait before next request is allowed.""" + return self.token_bucket.get_wait_time() + + def _add_request(self): + """Record a new request (not needed for token bucket, but kept for compatibility).""" + pass + + def _calculate_delay(self, attempt: int) -> float: + """Calculate delay for retry with exponential backoff.""" + delay = min( + config.initial_delay * (config.backoff_factor ** (attempt - 1)), + config.max_delay + ) + return delay + + def _is_rate_limit_error(self, error: Exception) -> bool: + """Check if the error is a rate limit error.""" + error_str = str(error).lower() + return any(phrase in error_str for phrase in [ + 'rate limit', + '429', + 'too many requests', + 'quota exceeded', + 'throttled' + ]) + + async def _delay(self, seconds: float): + """Async delay function.""" + await asyncio.sleep(seconds) + + async def with_retry(self, operation: Callable, context: str = "API request") -> Any: + """ + Execute an operation with retry logic and rate limiting. + + Args: + operation: The async function to execute + context: Context string for logging + + Returns: + The result of the operation + """ + attempt = 1 + + while attempt <= config.max_retry_attempts: + try: + # Check rate limits before making request + if self._is_rate_limited(): + wait_time = self._calculate_wait_time() + logger.warning( + f"Rate limit active for {context}. Waiting {wait_time:.2f}s") + await self._delay(wait_time) + + # Record the request + self._add_request() + + # Execute the operation + return await operation() + + except Exception as error: + if self._is_rate_limit_error(error) and attempt < config.max_retry_attempts: + delay = self._calculate_delay(attempt) + logger.warning( + f"Rate limit hit for {context}. " + f"Attempt {attempt}/{config.max_retry_attempts}. " + f"Retrying in {delay:.2f}s" + ) + await self._delay(delay) + attempt += 1 + continue + else: + # Re-raise the error if it's not a rate limit error or we've exhausted retries + raise error + + # This should never be reached, but just in case + raise Exception( + f"Max retry attempts ({config.max_retry_attempts}) exceeded for {context}") + + @asynccontextmanager + async def rate_limited_operation(self, context: str = "API request"): + """ + Context manager for rate-limited operations. + + Args: + context: Context string for logging + + Yields: + None + """ + try: + # Check rate limits before starting + if self._is_rate_limited(): + wait_time = self._calculate_wait_time() + logger.warning( + f"Rate limit active for {context}. Waiting {wait_time:.2f}s") + await self._delay(wait_time) + + # Record the request + self._add_request() + + yield + + except Exception as error: + if self._is_rate_limit_error(error): + logger.error(f"Rate limit error in {context}: {error}") + raise error + + +def get_rate_limiter( + api_type: str = "default", + max_requests_per_second: Optional[int] = None +) -> RateLimiter: + """ + Get a rate limiter instance for a specific API type. + + Args: + api_type: Type of API (e.g., "market_data", "accounts", "products") + max_requests_per_second: Custom per-second limit for this API type + + Returns: + RateLimiter instance + """ + if max_requests_per_second is None: + if api_type == "market_data": + max_requests_per_second = config.market_data_rate_limit + elif api_type == "accounts": + max_requests_per_second = config.accounts_rate_limit + elif api_type == "products": + max_requests_per_second = config.products_rate_limit + else: + max_requests_per_second = config.default_max_requests_per_second + + if api_type not in _rate_limiters: + _rate_limiters[api_type] = RateLimiter( + max_requests_per_second=max_requests_per_second + ) + + return _rate_limiters[api_type] + + +def rate_limited( + api_type: str = "default", + max_requests_per_second: Optional[int] = None +): + """ + Decorator to apply rate limiting to async functions. + + Args: + api_type: Type of API for rate limiting configuration + max_requests_per_second: Custom per-second limit for this function + + Returns: + Decorated function with rate limiting + """ + def decorator(func): + @functools.wraps(func) + async def wrapper(*args, **kwargs): + rate_limiter = get_rate_limiter( + api_type=api_type, + max_requests_per_second=max_requests_per_second + ) + + async def operation(): + return await func(*args, **kwargs) + + context = f"{api_type} API call: {func.__name__}" + return await rate_limiter.with_retry(operation, context) + + return wrapper + return decorator + + +# Create config instance +config = RateLimitConfig() diff --git a/mcp_servers/confluence/.env.example b/mcp_servers/confluence/.env.example new file mode 100644 index 00000000..52442cc9 --- /dev/null +++ b/mcp_servers/confluence/.env.example @@ -0,0 +1,2 @@ +# Port for the MCP server to listen on +CONFLUENCE_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/confluence/Dockerfile b/mcp_servers/confluence/Dockerfile new file mode 100644 index 00000000..399db1b7 --- /dev/null +++ b/mcp_servers/confluence/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/confluence/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the server code +COPY mcp_servers/confluence/ . + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/confluence/README.md b/mcp_servers/confluence/README.md new file mode 100644 index 00000000..e1fe02e0 --- /dev/null +++ b/mcp_servers/confluence/README.md @@ -0,0 +1,78 @@ +# Confluence MCP Server + +A Model Context Protocol (MCP) server for Atlassian Confluence integration. Manage pages, spaces, and content using Confluence's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Confluence with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("CONFLUENCE", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/confluence-mcp-server:latest + + +# Run Confluence MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/confluence-mcp-server:latest + + +# Run Confluence MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_confluence_api_token_here"}' \ + ghcr.io/klavis-ai/confluence-mcp-server:latest +``` + +**OAuth Setup:** Confluence requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Page Management**: Create, read, update, and delete Confluence pages +- **Space Operations**: Manage Confluence spaces and permissions +- **Content Search**: Search content across pages and spaces +- **Comments**: Add and manage page comments +- **Attachments**: Handle file attachments and media + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/confluence/__init__.py b/mcp_servers/confluence/__init__.py new file mode 100644 index 00000000..d4d78ed5 --- /dev/null +++ b/mcp_servers/confluence/__init__.py @@ -0,0 +1 @@ +# Confluence MCP Server Package \ No newline at end of file diff --git a/mcp_servers/confluence/client.py b/mcp_servers/confluence/client.py new file mode 100644 index 00000000..8256a647 --- /dev/null +++ b/mcp_servers/confluence/client.py @@ -0,0 +1,730 @@ +from enum import Enum +from typing import Any +from urllib.parse import parse_qs, urlparse +from contextvars import ContextVar +import logging + +import httpx +from errors import ToolExecutionError, AuthenticationError, TokenExpiredError, InvalidTokenError + +# Single auth token context for the entire application +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +from enums import BodyFormat, PageUpdateMode +from utils import ( + build_child_url, + build_hierarchy, + remove_none_values, +) + +# Set up logging +logger = logging.getLogger(__name__) + + +class ConfluenceAPIVersion(str, Enum): + V1 = "wiki/rest/api" + V2 = "wiki/api/v2" + + +class ConfluenceClient: + ACCESSIBLE_RESOURCES_URL = "https://api.atlassian.com/oauth/token/accessible-resources" + BASE_URL = "https://api.atlassian.com/ex/confluence" + + def __init__(self, token: str, api_version: ConfluenceAPIVersion): + self.token = token + self.api_version = api_version.value + self.cloud_id = None # Will be set lazily when first needed + + async def _get_cloud_id(self) -> str: + """ + Fetch the cloudId for .atlassian.net + using the OAuth2 3LO accessible-resources endpoint. + + For details on why this is necessary, see: https://developer.atlassian.com/cloud/oauth/getting-started/making-calls-to-api + """ + if not self.token or self.token.strip() == "": + raise InvalidTokenError( + message="No OAuth token provided", + developer_message="Please ensure you have a valid OAuth 2.0 access token. You may need to complete the OAuth authorization flow." + ) + + headers = { + "Authorization": f"Bearer {self.token}", + "Accept": "application/json" + } + + logger.info(f"Fetching cloud ID from accessible-resources endpoint") + + try: + async with httpx.AsyncClient() as client: + resp = await client.get(self.ACCESSIBLE_RESOURCES_URL, headers=headers) + resp.raise_for_status() + resp_json = resp.json() + + if len(resp_json) == 0: + raise ToolExecutionError( + message="No workspaces found for the authenticated user.", + developer_message="The OAuth token is valid but no Confluence workspaces are accessible. Ensure the user has access to at least one Confluence site." + ) + + cloud_id = resp_json[0].get("id") + logger.info(f"Successfully retrieved cloud ID: {cloud_id}") + return cloud_id + + except httpx.HTTPStatusError as e: + if e.response.status_code == 401: + logger.error(f"OAuth token authentication failed: {e}") + raise TokenExpiredError( + message="OAuth token is expired or invalid", + developer_message=f"Received 401 Unauthorized when accessing {self.ACCESSIBLE_RESOURCES_URL}. " + + "The OAuth token may have expired or been revoked. " + + "Please re-authenticate using the OAuth 2.0 flow to get a new access token. " + + "For refresh tokens, use the refresh token endpoint to get a new access token." + ) + elif e.response.status_code == 403: + logger.error(f"OAuth token insufficient permissions: {e}") + raise AuthenticationError( + message="OAuth token has insufficient permissions", + developer_message=f"Received 403 Forbidden when accessing {self.ACCESSIBLE_RESOURCES_URL}. " + + "The OAuth token may not have the required scopes. " + + "Ensure your OAuth app has the necessary Confluence scopes configured." + ) + else: + logger.error(f"HTTP error when fetching cloud ID: {e}") + raise ToolExecutionError( + message=f"Failed to fetch cloud ID: HTTP {e.response.status_code}", + developer_message=f"Unexpected HTTP error {e.response.status_code} when calling {self.ACCESSIBLE_RESOURCES_URL}: {e}" + ) + except httpx.RequestError as e: + logger.error(f"Network error when fetching cloud ID: {e}") + raise ToolExecutionError( + message="Network error when connecting to Atlassian API", + developer_message=f"Request error when calling {self.ACCESSIBLE_RESOURCES_URL}: {e}" + ) + except Exception as e: + logger.error(f"Unexpected error when fetching cloud ID: {e}") + raise ToolExecutionError( + message="Unexpected error when fetching cloud ID", + developer_message=f"Unexpected error in _get_cloud_id(): {type(e).__name__}: {e}" + ) + + async def _ensure_cloud_id(self) -> str: + """Ensure cloud_id is available, fetching it if necessary.""" + if self.cloud_id is None: + self.cloud_id = await self._get_cloud_id() + return self.cloud_id + + async def request(self, method: str, path: str, **kwargs: Any) -> Any: + cloud_id = await self._ensure_cloud_id() + + headers = { + "Accept": "application/json", + "Authorization": f"Bearer {self.token}", + } + + # Merge with any additional headers from kwargs + if 'headers' in kwargs: + headers.update(kwargs.pop('headers')) + + async with httpx.AsyncClient() as client: + url = f"{self.BASE_URL}/{cloud_id}/{self.api_version}/{path.lstrip('/')}" + logger.debug(f"Making {method} request to: {url}") + + try: + response = await client.request( + method, + url, + headers=headers, + **kwargs, + ) + response.raise_for_status() + return response.json() + except httpx.HTTPStatusError as e: + if e.response.status_code == 401: + logger.error(f"Authentication failed for API request: {e}") + raise TokenExpiredError( + message="OAuth token is expired or invalid", + developer_message=f"Received 401 Unauthorized when calling {url}. " + + "The OAuth token may have expired. Please re-authenticate." + ) + else: + logger.error(f"HTTP error in API request: {e}") + raise ToolExecutionError( + message=f"API request failed: HTTP {e.response.status_code}", + developer_message=f"HTTP {e.response.status_code} error when calling {url}: {e}" + ) + except Exception as e: + logger.error(f"Unexpected error in API request: {e}") + raise ToolExecutionError( + message="Unexpected error in API request", + developer_message=f"Unexpected error when calling {url}: {type(e).__name__}: {e}" + ) + + async def get(self, path: str, **kwargs: Any) -> Any: + return await self.request("GET", path, **kwargs) + + async def post(self, path: str, **kwargs: Any) -> Any: + return await self.request("POST", path, **kwargs) + + async def put(self, path: str, **kwargs: Any) -> Any: + return await self.request("PUT", path, **kwargs) + + async def delete(self, path: str, **kwargs: Any) -> Any: + return await self.request("DELETE", path, **kwargs) + + +class ConfluenceClientV1(ConfluenceClient): + def __init__(self): + auth_token = auth_token_context.get() + if not auth_token: + logger.warning("No auth token found in context") + super().__init__(auth_token, api_version=ConfluenceAPIVersion.V1) + + def _build_query_cql(self, query: str, enable_fuzzy: bool) -> str: + """Build CQL for a single query (term or phrase). + + Args: + query: The search query (single word term or multi-word phrase) + enable_fuzzy: Whether to enable fuzzy matching for single terms + + Returns: + CQL string for the query + """ + query = query.strip() + if not query: + return "" + + # For phrases (multiple words), don't use fuzzy matching + if " " in query: + return f'(text ~ "{query}" OR title ~ "{query}" OR space.title ~ "{query}")' + else: + # For single terms, optionally use fuzzy matching + term_suffix = "~" if enable_fuzzy else "" + return f'(text ~ "{query}{term_suffix}" OR title ~ "{query}{term_suffix}" OR space.title ~ "{query}{term_suffix}")' # noqa: E501 + + def _build_and_cql(self, queries: list[str], enable_fuzzy: bool) -> str: + """Build CQL for queries that must ALL be present (AND logic). + + Args: + queries: List of queries that must all be present + enable_fuzzy: Whether to enable fuzzy matching for single terms + + Returns: + CQL string with AND logic + """ + and_parts = [] + for query in queries: + query_cql = self._build_query_cql(query, enable_fuzzy) + if query_cql: + and_parts.append(query_cql) + + if not and_parts: + return "" + + return f"({' AND '.join(and_parts)})" + + def _build_or_cql(self, queries: list[str], enable_fuzzy: bool) -> str: + """Build CQL for queries where ANY can be present (OR logic). + + Args: + queries: List of queries where any can be present + enable_fuzzy: Whether to enable fuzzy matching for single terms + + Returns: + CQL string with OR logic + """ + or_parts = [] + for query in queries: + query_cql = self._build_query_cql(query, enable_fuzzy) + if query_cql: + or_parts.append(query_cql) + + if not or_parts: + return "" + + return f"({' OR '.join(or_parts)})" + + def construct_cql( + self, + must_contain_all: list[str] | None, + can_contain_any: list[str] | None, + enable_fuzzy: bool = False, + ) -> str: + """Construct CQL query with AND/OR logic. + + Learn about advanced searching using CQL here: https://developer.atlassian.com/cloud/confluence/advanced-searching-using-cql/ + + Args: + must_contain_all: Queries that must ALL be present (AND logic) + can_contain_any: Queries where ANY can be present (OR logic) + enable_fuzzy: Whether to enable fuzzy matching for single terms + + Returns: + CQL query string + + Raises: + ToolExecutionError: If no search parameters are provided + """ + cql_parts = [] + + # Handle must_contain_all (AND logic) + if must_contain_all: + and_cql = self._build_and_cql(must_contain_all, enable_fuzzy) + if and_cql: + cql_parts.append(and_cql) + + # Handle can_contain_any (OR logic) + if can_contain_any: + or_cql = self._build_or_cql(can_contain_any, enable_fuzzy) + if or_cql: + cql_parts.append(or_cql) + + # If there's only one part, return it + if len(cql_parts) == 1: + return cql_parts[0] + + # AND the must_contain_all with the can_contain_any + if len(cql_parts) > 1: + return f"({' AND '.join(cql_parts)})" + + raise ToolExecutionError(message="At least one search parameter must be provided") + + def transform_search_content_response( + self, response: dict[str, Any] + ) -> dict[str, list[dict[str, Any]]]: + """ + Transform the response from the GET /search endpoint by converting relative webui paths + to absolute URLs using the base URL from the response. + """ + base_url = response.get("_links", {}).get("base", "") + transformed_results = [] + for result in response.get("results", []): + content = result.get("content", {}) + transformed_result = { + "id": content.get("id"), + "title": content.get("title"), + "type": content.get("type"), + "status": content.get("status"), + "excerpt": result.get("excerpt"), + "url": f"{base_url}{result.get('url')}", + } + transformed_results.append(transformed_result) + + return {"results": transformed_results} + + +class ConfluenceClientV2(ConfluenceClient): + def __init__(self): + auth_token = auth_token_context.get() + if not auth_token: + logger.warning("No auth token found in context") + super().__init__(auth_token, api_version=ConfluenceAPIVersion.V2) + + def _transform_links( + self, response: dict[str, Any], base_url: str | None = None + ) -> dict[str, Any]: + """ + Transform the links in a page response by converting relative URLs to absolute URLs. + + Args: + response: A page object from the API + base_url: The base URL to use for the transformation + + Returns: + The transformed response + """ + result = response.copy() + if "_links" in result: + base_url = base_url or result["_links"].get("base", "") + webui_path = result["_links"].get("webui", "") + result["url"] = f"{base_url}{webui_path}" + del result["_links"] + return result + + def transform_get_spaces_response( + self, response: dict[str, Any] + ) -> dict[str, list[dict[str, Any]]]: + """ + Transform the response from the GET /spaces endpoint by converting relative webui paths + to absolute URLs using the base URL from the response. + """ + pagination_token = parse_qs(urlparse(response.get("_links", {}).get("next", "")).query).get( + "cursor", + [None], # type: ignore[list-item] + )[0] + + base_url = response.get("_links", {}).get("base", "") + results = response.get("results", []) + + transformed_results = [] + for space in results: + space_copy = space.copy() + if "_links" in space_copy and "webui" in space_copy["_links"]: + webui_path = space_copy["_links"]["webui"] + space_copy["url"] = base_url + webui_path + del space_copy["_links"] + transformed_results.append(space_copy) + + results = {"spaces": transformed_results, "pagination_token": pagination_token} + return remove_none_values(results) + + def transform_list_pages_response(self, response: dict[str, Any]) -> dict[str, Any]: + """Transform the response from the GET /pages endpoint.""" + pagination_token = parse_qs(urlparse(response.get("_links", {}).get("next", "")).query).get( + "cursor", + [None], # type: ignore[list-item] + )[0] + + base_url = response.get("_links", {}).get("base", "") + pages = [self._transform_links(page, base_url) for page in response["results"]] + results = {"pages": pages, "pagination_token": pagination_token} + return remove_none_values(results) + + def transform_get_multiple_pages_response( + self, response: dict[str, Any] + ) -> dict[str, list[dict[str, Any]]]: + """Transform the response from the GET /pages endpoint.""" + base_url = response.get("_links", {}).get("base", "") + pages = [self._transform_links(page, base_url) for page in response["results"]] + return {"pages": pages} + + def transform_space_response( + self, response: dict[str, Any], base_url: str | None = None + ) -> dict[str, dict[str, Any]]: + """Transform API responses that return a space object.""" + return {"space": self._transform_links(response, base_url)} + + def transform_page_response(self, response: dict[str, Any]) -> dict[str, dict[str, Any]]: + """Transform API responses that return a page object.""" + return {"page": self._transform_links(response)} + + def transform_get_attachments_response(self, response: dict[str, Any]) -> dict[str, Any]: + """Transform the response from the GET /pages/{id}/attachments endpoint.""" + pagination_token = parse_qs(urlparse(response.get("_links", {}).get("next", "")).query).get( + "cursor", + [None], # type: ignore[list-item] + )[0] + + base_url = response.get("_links", {}).get("base", "") + attachments = [] + for attachment in response["results"]: + result = attachment.copy() + if "_links" in result: + webui_path = result["_links"].get("webui", "") + download_path = result["_links"].get("download", "") + result["url"] = f"{base_url}{webui_path}" + result["download_link"] = f"{base_url}{download_path}" + del result["_links"] + del result["webuiLink"] + del result["downloadLink"] + del result["version"] + attachments.append(result) + + return {"attachments": attachments, "pagination_token": pagination_token} + + def prepare_update_page_payload( + self, + page_id: str, + status: str, + title: str, + body_representation: str, + body_value: str, + version_number: int, + version_message: str, + ) -> dict[str, Any]: + """Prepare a payload for the PUT /pages/{id} endpoint.""" + return { + "id": page_id, + "status": status, + "title": title, + "body": { + "representation": body_representation, + "value": body_value, + }, + "version": { + "number": version_number, + "message": version_message, + }, + } + + def prepare_update_page_content_payload( + self, + content: str, + update_mode: PageUpdateMode, + old_content: str, + page_id: str, + status: str, + title: str, + body_representation: BodyFormat, + old_version_number: int, + ) -> dict[str, Any]: + """Prepare a payload for when updating the content of a page + + Args: + content: The content to update the page with + update_mode: The mode of update to use + old_content: The content of the page before the update + page_id: The ID of the page to update + status: The status of the page + title: The title of the page + body_representation: The format that the body (content) is in + old_version_number: The version number of the page before the update + + Returns: + A payload for the PUT /pages/{id} endpoint's json body + """ + updated_content = "" + updated_message = "" + if update_mode == PageUpdateMode.APPEND: + updated_content = f"{old_content}
{content}" + updated_message = "Append content to the page" + elif update_mode == PageUpdateMode.PREPEND: + updated_content = f"{content}
{old_content}" + updated_message = "Prepend content to the page" + elif update_mode == PageUpdateMode.REPLACE: + updated_content = content + updated_message = "Replace the page content" + payload = self.prepare_update_page_payload( + page_id=page_id, + status=status, + title=title, + body_representation=body_representation.to_api_value(), + body_value=updated_content, + version_number=old_version_number + 1, + version_message=updated_message, + ) + return payload + + async def get_root_pages_in_space(self, space_id: str) -> dict[str, Any]: + """ + Get the root pages in a space. + + Requires Confluence scope 'read:page:confluence' + """ + params = { + "depth": "root", + "limit": 250, + } + pages = await self.get(f"spaces/{space_id}/pages", params=params) + base_url = pages.get("_links", {}).get("base", "") + return {"pages": [self._transform_links(page, base_url) for page in pages["results"]]} + + async def get_space_homepage(self, space_id: str) -> dict[str, Any]: + """ + Get the homepage of a space. + + Requires Confluence scope 'read:page:confluence' + """ + root_pages = await self.get_root_pages_in_space(space_id) + for page in root_pages["pages"]: + if page.get("url", "").endswith("overview"): + return self._transform_links(page) + raise ToolExecutionError(message="No homepage found for space.") + + async def get_page_by_id( + self, page_id: str, content_format: BodyFormat = BodyFormat.STORAGE + ) -> dict[str, Any]: + """Get a page by its ID. + + Requires Confluence scope 'read:page:confluence' + + Args: + page_id: The ID of the page to get + content_format: The format of the page content + + Returns: + The page object + """ + params = remove_none_values({ + "body-format": content_format.to_api_value(), + }) + try: + page = await self.get(f"pages/{page_id}", params=params) + except httpx.HTTPStatusError as e: + # If the page is not found, return an empty page object + if e.response.status_code in [400, 404]: + return self.transform_page_response({}) + raise + + return self.transform_page_response(page) + + async def get_page_by_title( + self, page_title: str, content_format: BodyFormat = BodyFormat.STORAGE + ) -> dict[str, Any]: + """Get a page by its title. + + Requires Confluence scope 'read:page:confluence' + + Args: + page_title: The title of the page to get + content_format: The format of the page content + + Returns: + The page object + """ + params = { + "title": page_title, + "body-format": content_format.to_api_value(), + } + response = await self.get("pages", params=params) + pages = response.get("results", []) + if not pages: + # If the page is not found, return an empty page object + return self.transform_page_response({}) + return self.transform_page_response(pages[0]) + + async def get_space_by_id(self, space_id: str) -> dict[str, Any]: + """Get a space by its ID. + + Requires Confluence scope 'read:space:confluence' + + Args: + space_id: The ID of the space to get + + Returns: + The space object + """ + space = await self.get(f"spaces/{space_id}") + return self.transform_space_response(space) + + async def get_space_by_key(self, space_key: str) -> dict[str, Any]: + """Get a space by its key. + + Requires Confluence scope 'read:space:confluence' + + Args: + space_key: The key of the space to get + + Returns: + The space object + """ + response = await self.get("spaces", params={"keys": [space_key]}) + base_url = response.get("_links", {}).get("base", "") + spaces = response.get("results", []) + if not spaces: + raise ToolExecutionError(message=f"No space found with key: '{space_key}'") + return self.transform_space_response(spaces[0], base_url=base_url) + + async def get_space(self, space_identifier: str) -> dict[str, Any]: + """Get a space from its identifier. + + The identifier can be either a space ID (numeric) or a space key (alphanumeric). + + Args: + space_identifier: The ID or key of the space to get + + Returns: + The space object + """ + if space_identifier.isdigit(): + return await self.get_space_by_id(space_identifier) + else: + return await self.get_space_by_key(space_identifier) + + async def get_page_id(self, page_identifier: str) -> str: + """Get a page ID from its identifier. + + The identifier can be either a page ID (numeric) or a page title (alphanumeric). + + Args: + page_identifier: The ID or title of the page to get + + Returns: + The page ID + """ + if page_identifier.isdigit(): + return page_identifier + else: + page = await self.get_page_by_title(page_identifier) + page_data = page.get("page", {}) + if not page_data: + raise ToolExecutionError(message=f"No page found with title: '{page_identifier}'") + return page_data["id"] + + async def get_space_id(self, space_identifier: str) -> str: + """Get a space ID from its identifier. + + The identifier can be either a space ID (numeric) or a space key (alphanumeric). + + Args: + space_identifier: The ID or key of the space to get + + Returns: + The space ID + """ + space = await self.get_space(space_identifier) + return space["space"]["id"] + + def create_space_tree(self, space: dict) -> dict: + """Create a space tree structure from space data.""" + space_data = space.get("space", {}) + + return { + "id": space_data.get("id"), + "key": space_data.get("key"), + "name": space_data.get("name"), + "type": "space", + "url": space_data.get("url", ""), + "description": space_data.get("description", {}).get("plain", ""), + "children": [] + } + + def convert_root_pages_to_tree_nodes(self, pages: list) -> list: + """Convert root pages to tree nodes.""" + tree_nodes = [] + + for page in pages: + node = { + "id": page.get("id"), + "title": page.get("title"), + "type": page.get("type", "page"), + "status": page.get("status", "current"), + "url": page.get("url", ""), + "children": [] + } + tree_nodes.append(node) + + return tree_nodes + + async def process_page_descendants(self, root_children: list, base_url: str) -> None: + """Process page descendants and build the hierarchy.""" + # For each root page, get its descendants + for root_child in root_children: + if root_child["type"] == "page": + try: + # Get descendants for this page + params = { + "expand": "ancestors" + } + response = await self.get(f"content/{root_child['id']}/descendant", params=params) + + # Process descendants into hierarchy + descendants = response.get("page", {}).get("results", []) + if descendants: + transformed_children = [] + for desc in descendants: + child_node = { + "id": desc.get("id"), + "title": desc.get("title"), + "type": desc.get("type", "page"), + "status": desc.get("status", "current"), + "parent_id": None, + "children": [] + } + + # Determine parent ID from ancestors + ancestors = desc.get("ancestors", []) + if ancestors: + child_node["parent_id"] = ancestors[-1].get("id") + + # Build URL + child_node["url"] = build_child_url(https://github.com/base_url,%20child_node) or "" + + transformed_children.append(child_node) + + # Build hierarchy + build_hierarchy(transformed_children, root_child["id"], root_child) + + except Exception: + # Log the error but continue processing other pages + continue \ No newline at end of file diff --git a/mcp_servers/confluence/enums.py b/mcp_servers/confluence/enums.py new file mode 100644 index 00000000..714ba9f5 --- /dev/null +++ b/mcp_servers/confluence/enums.py @@ -0,0 +1,115 @@ +from enum import Enum + + +class BodyFormat(str, Enum): + STORAGE = "storage" # Storage representation for editing, with relative urls in the markup + + def to_api_value(self) -> str: + mapping = { + BodyFormat.STORAGE: "storage", + } + return mapping.get(self, BodyFormat.STORAGE.value) + + +class PageUpdateMode(str, Enum): + """The mode of update for a page""" + + PREPEND = "prepend" # Add content to the beginning of the page. + APPEND = "append" # Add content to the end of the page. + REPLACE = "replace" # Replace the entire page with the new content. + + +class PageSortOrder(str, Enum): + """The order of the pages to sort by""" + + ID_ASCENDING = "id-ascending" + ID_DESCENDING = "id-descending" + TITLE_ASCENDING = "title-ascending" + TITLE_DESCENDING = "title-descending" + CREATED_DATE_ASCENDING = "created-date-oldest-to-newest" + CREATED_DATE_DESCENDING = "created-date-newest-to-oldest" + MODIFIED_DATE_ASCENDING = "modified-date-oldest-to-newest" + MODIFIED_DATE_DESCENDING = "modified-date-newest-to-oldest" + + def to_api_value(self) -> str: + mapping = { + PageSortOrder.ID_ASCENDING: "id", + PageSortOrder.ID_DESCENDING: "-id", + PageSortOrder.TITLE_ASCENDING: "title", + PageSortOrder.TITLE_DESCENDING: "-title", + PageSortOrder.CREATED_DATE_ASCENDING: "created-date", + PageSortOrder.CREATED_DATE_DESCENDING: "-created-date", + PageSortOrder.MODIFIED_DATE_ASCENDING: "modified-date", + PageSortOrder.MODIFIED_DATE_DESCENDING: "-modified-date", + } + return mapping.get(self, PageSortOrder.CREATED_DATE_DESCENDING.value) + + +class AttachmentSortOrder(str, Enum): + """The order of the attachments to sort by""" + + CREATED_DATE_ASCENDING = "created-date-oldest-to-newest" + CREATED_DATE_DESCENDING = "created-date-newest-to-oldest" + MODIFIED_DATE_ASCENDING = "modified-date-oldest-to-newest" + MODIFIED_DATE_DESCENDING = "modified-date-newest-to-oldest" + + def to_api_value(self) -> str: + mapping = { + AttachmentSortOrder.CREATED_DATE_ASCENDING: "created-date", + AttachmentSortOrder.CREATED_DATE_DESCENDING: "-created-date", + AttachmentSortOrder.MODIFIED_DATE_ASCENDING: "modified-date", + AttachmentSortOrder.MODIFIED_DATE_DESCENDING: "-modified-date", + } + return mapping.get(self, AttachmentSortOrder.CREATED_DATE_DESCENDING.value) + + +# Conversion functions to convert string values to enum objects +def convert_sort_by_to_enum(sort_by_str: str | None) -> PageSortOrder | None: + """Convert string sort_by value to PageSortOrder enum""" + if sort_by_str is None: + return None + + # Create a mapping from string values to enum values + string_to_enum = { + "id-ascending": PageSortOrder.ID_ASCENDING, + "id-descending": PageSortOrder.ID_DESCENDING, + "title-ascending": PageSortOrder.TITLE_ASCENDING, + "title-descending": PageSortOrder.TITLE_DESCENDING, + "created-date-oldest-to-newest": PageSortOrder.CREATED_DATE_ASCENDING, + "created-date-newest-to-oldest": PageSortOrder.CREATED_DATE_DESCENDING, + "modified-date-oldest-to-newest": PageSortOrder.MODIFIED_DATE_ASCENDING, + "modified-date-newest-to-oldest": PageSortOrder.MODIFIED_DATE_DESCENDING, + } + + return string_to_enum.get(sort_by_str, PageSortOrder.CREATED_DATE_DESCENDING) + + +def convert_sort_order_to_enum(sort_order_str: str | None) -> AttachmentSortOrder | None: + """Convert string sort_order value to AttachmentSortOrder enum""" + if sort_order_str is None: + return None + + # Create a mapping from string values to enum values + string_to_enum = { + "created-date-oldest-to-newest": AttachmentSortOrder.CREATED_DATE_ASCENDING, + "created-date-newest-to-oldest": AttachmentSortOrder.CREATED_DATE_DESCENDING, + "modified-date-oldest-to-newest": AttachmentSortOrder.MODIFIED_DATE_ASCENDING, + "modified-date-newest-to-oldest": AttachmentSortOrder.MODIFIED_DATE_DESCENDING, + } + + return string_to_enum.get(sort_order_str, AttachmentSortOrder.CREATED_DATE_DESCENDING) + + +def convert_update_mode_to_enum(update_mode_str: str | None) -> PageUpdateMode: + """Convert string update_mode value to PageUpdateMode enum""" + if update_mode_str is None: + return PageUpdateMode.APPEND + + # Create a mapping from string values to enum values + string_to_enum = { + "prepend": PageUpdateMode.PREPEND, + "append": PageUpdateMode.APPEND, + "replace": PageUpdateMode.REPLACE, + } + + return string_to_enum.get(update_mode_str, PageUpdateMode.APPEND) \ No newline at end of file diff --git a/mcp_servers/confluence/errors.py b/mcp_servers/confluence/errors.py new file mode 100644 index 00000000..dd5e6d04 --- /dev/null +++ b/mcp_servers/confluence/errors.py @@ -0,0 +1,26 @@ +class ToolExecutionError(Exception): + def __init__(self, message: str, developer_message: str = ""): + super().__init__(message) + self.developer_message = developer_message + + +class RetryableToolError(Exception): + def __init__(self, message: str, additional_prompt_content: str = "", retry_after_ms: int = 1000, developer_message: str = ""): + super().__init__(message) + self.additional_prompt_content = additional_prompt_content + self.retry_after_ms = retry_after_ms + self.developer_message = developer_message + +class AuthenticationError(ToolExecutionError): + def __init__(self, message: str, developer_message: str = ""): + super().__init__(message, developer_message) + + +class TokenExpiredError(AuthenticationError): + def __init__(self, message: str = "OAuth token has expired", developer_message: str = ""): + super().__init__(message, developer_message) + + +class InvalidTokenError(AuthenticationError): + def __init__(self, message: str = "OAuth token is invalid", developer_message: str = ""): + super().__init__(message, developer_message) \ No newline at end of file diff --git a/mcp_servers/confluence/requirements.txt b/mcp_servers/confluence/requirements.txt new file mode 100644 index 00000000..a5999950 --- /dev/null +++ b/mcp_servers/confluence/requirements.txt @@ -0,0 +1,10 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +httpx +click +starlette +asyncio \ No newline at end of file diff --git a/mcp_servers/confluence/server.py b/mcp_servers/confluence/server.py new file mode 100644 index 00000000..6941972f --- /dev/null +++ b/mcp_servers/confluence/server.py @@ -0,0 +1,657 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from errors import ToolExecutionError, AuthenticationError, TokenExpiredError, InvalidTokenError + +# Import tools +from tools import ( + # Page tools + create_page, get_page, get_pages_by_id, list_pages, rename_page, + update_page_content, + # Space tools + create_space, get_space, get_space_hierarchy, list_spaces, + # Search tools + search_content, + # Attachment tools + get_attachments_for_page, list_attachments, get_attachment, +) + +from enums import ( + convert_sort_by_to_enum, convert_sort_order_to_enum, convert_update_mode_to_enum +) + +# Import context for auth token +from client import auth_token_context + + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +CONFLUENCE_MCP_SERVER_PORT = int(os.getenv("CONFLUENCE_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +@click.command() +@click.option("--port", default=CONFLUENCE_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + app = Server("confluence-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # Page tools + types.Tool( + name="confluence_create_page", + description="Create a new page in Confluence", + inputSchema={ + "type": "object", + "properties": { + "space_identifier": { + "type": "string", + "description": "The ID or title of the space to create the page in", + }, + "title": { + "type": "string", + "description": "The title of the page", + }, + "content": { + "type": "string", + "description": "The content of the page. Only plain text is supported", + }, + "parent_id": { + "type": "string", + "description": "The ID of the parent. If not provided, the page will be created at the root of the space.", + }, + "is_private": { + "type": "boolean", + "description": "If true, then only the user who creates this page will be able to see it. Defaults to False", + }, + "is_draft": { + "type": "boolean", + "description": "If true, then the page will be created as a draft. Defaults to False", + }, + }, + "required": ["space_identifier", "title", "content"], + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_PAGE"} + ), + ), + types.Tool( + name="confluence_get_page", + description="Retrieve a SINGLE page's content by its ID or title. For retrieving MULTIPLE pages, use confluence_get_pages_by_id instead", + inputSchema={ + "type": "object", + "properties": { + "page_identifier": { + "type": "string", + "description": "Can be a page's ID or title. Numerical titles are NOT supported.", + }, + }, + "required": ["page_identifier"], + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_PAGE", "readOnlyHint": True} + ), + ), + types.Tool( + name="confluence_get_pages_by_id", + description="Get the content of MULTIPLE pages by their ID in a single efficient request", + inputSchema={ + "type": "object", + "properties": { + "page_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "The IDs of the pages to get. IDs are numeric. Titles of pages are NOT supported. Maximum of 250 page ids supported.", + }, + }, + "required": ["page_ids"], + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_PAGE", "readOnlyHint": True} + ), + ), + types.Tool( + name="confluence_list_pages", + description="Get the content of multiple pages with optional filtering and sorting", + inputSchema={ + "type": "object", + "properties": { + "space_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "Restrict the response to only include pages in these spaces. Only space IDs are supported. Titles of spaces are NOT supported. Maximum of 100 space ids supported.", + }, + "sort_by": { + "type": "string", + "description": "The order of the pages to sort by. Defaults to created-date-newest-to-oldest", + "enum": ["id-ascending", "id-descending", "title-ascending", "title-descending", + "created-date-oldest-to-newest", "created-date-newest-to-oldest", + "modified-date-oldest-to-newest", "modified-date-newest-to-oldest"], + }, + "limit": { + "type": "integer", + "description": "The maximum number of pages to return. Defaults to 25. Max is 250", + "minimum": 1, + "maximum": 250, + }, + "pagination_token": { + "type": "string", + "description": "The pagination token to use for the next page of results", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_PAGE", "readOnlyHint": True} + ), + ), + types.Tool( + name="confluence_update_page_content", + description="Update a page's content", + inputSchema={ + "type": "object", + "properties": { + "page_identifier": { + "type": "string", + "description": "The ID or title of the page to update. Numerical titles are NOT supported.", + }, + "content": { + "type": "string", + "description": "The content of the page. Only plain text is supported", + }, + "update_mode": { + "type": "string", + "description": "The mode of update. Defaults to 'append'.", + "enum": ["prepend", "append", "replace"], + }, + }, + "required": ["page_identifier", "content"], + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_PAGE"} + ), + ), + types.Tool( + name="confluence_rename_page", + description="Rename a page by changing its title", + inputSchema={ + "type": "object", + "properties": { + "page_identifier": { + "type": "string", + "description": "The ID or title of the page to rename. Numerical titles are NOT supported.", + }, + "title": { + "type": "string", + "description": "The title of the page", + }, + }, + "required": ["page_identifier", "title"], + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_PAGE"} + ), + ), + # Space tools + types.Tool( + name="confluence_create_space", + description="Create a new space in Confluence", + inputSchema={ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the space", + }, + "key": { + "type": "string", + "description": "The key of the space. If not provided, one will be generated automatically", + }, + "description": { + "type": "string", + "description": "The description of the space", + }, + "is_private": { + "type": "boolean", + "description": "If true, the space will be private to the creator. Defaults to False", + }, + }, + "required": ["name"], + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_SPACE"} + ), + ), + types.Tool( + name="confluence_list_spaces", + description="List all spaces sorted by name in ascending order", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "The maximum number of spaces to return. Defaults to 25. Max is 250", + "minimum": 1, + "maximum": 250, + }, + "pagination_token": { + "type": "string", + "description": "The pagination token to use for the next page of results", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_SPACE", "readOnlyHint": True} + ), + ), + types.Tool( + name="confluence_get_space", + description="Get the details of a space by its ID or key", + inputSchema={ + "type": "object", + "properties": { + "space_identifier": { + "type": "string", + "description": "Can be a space's ID or key. Numerical keys are NOT supported", + }, + }, + "required": ["space_identifier"], + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_SPACE", "readOnlyHint": True} + ), + ), + types.Tool( + name="confluence_get_space_hierarchy", + description="Retrieve the full hierarchical structure of a Confluence space as a tree structure", + inputSchema={ + "type": "object", + "properties": { + "space_identifier": { + "type": "string", + "description": "Can be a space's ID or key. Numerical keys are NOT supported", + }, + }, + "required": ["space_identifier"], + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_SPACE", "readOnlyHint": True} + ), + ), + # Search tools + types.Tool( + name="confluence_search_content", + description="Search for content in Confluence. The search is performed across all content in the authenticated user's Confluence workspace. All search terms in Confluence are case insensitive.", + inputSchema={ + "type": "object", + "properties": { + "must_contain_all": { + "type": "array", + "items": {"type": "string"}, + "description": "Words/phrases that content MUST contain (AND logic). Each item can be: single word or multi-word phrase. All items in this list must be present for content to match.", + }, + "can_contain_any": { + "type": "array", + "items": {"type": "string"}, + "description": "Words/phrases where content can contain ANY of these (OR logic). Content matching ANY item in this list will be included.", + }, + "enable_fuzzy": { + "type": "boolean", + "description": "Enable fuzzy matching to find similar terms (e.g. 'roam' will find 'foam'). Defaults to True", + }, + "limit": { + "type": "integer", + "description": "Maximum number of results to return (1-100). Defaults to 25", + "minimum": 1, + "maximum": 100, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_SEARCH", "readOnlyHint": True} + ), + ), + # Attachment tools + types.Tool( + name="confluence_list_attachments", + description="List attachments in a workspace", + inputSchema={ + "type": "object", + "properties": { + "sort_order": { + "type": "string", + "description": "The order of the attachments to sort by. Defaults to created-date-newest-to-oldest", + "enum": ["created-date-oldest-to-newest", "created-date-newest-to-oldest", + "modified-date-oldest-to-newest", "modified-date-newest-to-oldest"], + }, + "limit": { + "type": "integer", + "description": "The maximum number of attachments to return. Defaults to 25. Max is 250", + "minimum": 1, + "maximum": 250, + }, + "pagination_token": { + "type": "string", + "description": "The pagination token to use for the next page of results", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_ATTACHMENT", "readOnlyHint": True} + ), + ), + types.Tool( + name="confluence_get_attachments_for_page", + description="Get attachments for a page by its ID or title. If a page title is provided, then the first page with an exact matching title will be returned.", + inputSchema={ + "type": "object", + "properties": { + "page_identifier": { + "type": "string", + "description": "The ID or title of the page to get attachments for", + }, + "limit": { + "type": "integer", + "description": "The maximum number of attachments to return. Defaults to 25. Max is 250", + "minimum": 1, + "maximum": 250, + }, + "pagination_token": { + "type": "string", + "description": "The pagination token to use for the next page of results", + }, + }, + "required": ["page_identifier"], + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_ATTACHMENT", "readOnlyHint": True} + ), + ), + types.Tool( + name="confluence_get_attachment", + description="Get a specific attachment by its ID", + inputSchema={ + "type": "object", + "properties": { + "attachment_id": { + "type": "string", + "description": "The ID of the attachment to get", + }, + }, + "required": ["attachment_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "CONFLUENCE_ATTACHMENT", "readOnlyHint": True} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + logger.info(f"Calling tool: {name} with arguments: {arguments}") + + try: + # Page tools + if name == "confluence_create_page": + result = await create_page( + space_identifier=arguments["space_identifier"], + title=arguments["title"], + content=arguments["content"], + parent_id=arguments.get("parent_id"), + is_private=arguments.get("is_private", False), + is_draft=arguments.get("is_draft", False), + ) + elif name == "confluence_get_page": + result = await get_page( + page_identifier=arguments["page_identifier"], + ) + elif name == "confluence_get_pages_by_id": + result = await get_pages_by_id( + page_ids=arguments["page_ids"], + ) + elif name == "confluence_list_pages": + result = await list_pages( + space_ids=arguments.get("space_ids"), + sort_by=convert_sort_by_to_enum(arguments.get("sort_by")), + limit=arguments.get("limit", 25), + pagination_token=arguments.get("pagination_token"), + ) + elif name == "confluence_update_page_content": + result = await update_page_content( + page_identifier=arguments["page_identifier"], + content=arguments["content"], + update_mode=convert_update_mode_to_enum(arguments.get("update_mode", "append")), + ) + elif name == "confluence_rename_page": + result = await rename_page( + page_identifier=arguments["page_identifier"], + title=arguments["title"], + ) + # Space tools + elif name == "confluence_create_space": + result = await create_space( + name=arguments["name"], + key=arguments.get("key"), + description=arguments.get("description"), + is_private=arguments.get("is_private", False), + ) + elif name == "confluence_list_spaces": + result = await list_spaces( + limit=arguments.get("limit", 25), + pagination_token=arguments.get("pagination_token"), + ) + elif name == "confluence_get_space": + result = await get_space( + space_identifier=arguments["space_identifier"], + ) + elif name == "confluence_get_space_hierarchy": + result = await get_space_hierarchy( + space_identifier=arguments["space_identifier"], + ) + # Search tools + elif name == "confluence_search_content": + result = await search_content( + must_contain_all=arguments.get("must_contain_all"), + can_contain_any=arguments.get("can_contain_any"), + enable_fuzzy=arguments.get("enable_fuzzy", True), + limit=arguments.get("limit", 25), + ) + # Attachment tools + elif name == "confluence_list_attachments": + result = await list_attachments( + sort_order=convert_sort_order_to_enum(arguments.get("sort_order")), + limit=arguments.get("limit", 25), + pagination_token=arguments.get("pagination_token"), + ) + elif name == "confluence_get_attachments_for_page": + result = await get_attachments_for_page( + page_identifier=arguments["page_identifier"], + limit=arguments.get("limit", 25), + pagination_token=arguments.get("pagination_token"), + ) + elif name == "confluence_get_attachment": + result = await get_attachment( + attachment_id=arguments["attachment_id"], + ) + else: + raise ValueError(f"Unknown tool: {name}") + + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + except (AuthenticationError, TokenExpiredError, InvalidTokenError, ToolExecutionError) as e: + logger.error(f"Confluence error in {name}: {e}") + error_response = { + "error": str(e), + "developer_message": getattr(e, "developer_message", ""), + } + return [types.TextContent(type="text", text=json.dumps(error_response, indent=2))] + except Exception as e: + logger.exception(f"Unexpected error in tool {name}") + error_response = { + "error": f"Unexpected error: {str(e)}", + "developer_message": f"Unexpected error in tool {name}: {type(e).__name__}: {str(e)}", + } + return [types.TextContent(type="text", text=json.dumps(error_response, indent=2))] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + try: + uvicorn.run( + starlette_app, + host="0.0.0.0", + port=port, + log_level=log_level.lower(), + ) + return 0 + except Exception as e: + logger.exception(f"Failed to start server: {e}") + return 1 + +if __name__ == "__main__": + exit(main()) \ No newline at end of file diff --git a/mcp_servers/confluence/tools/__init__.py b/mcp_servers/confluence/tools/__init__.py new file mode 100644 index 00000000..f70449fe --- /dev/null +++ b/mcp_servers/confluence/tools/__init__.py @@ -0,0 +1,32 @@ +from .attachment import get_attachments_for_page, list_attachments, get_attachment +from .page import ( + create_page, + get_page, + get_pages_by_id, + list_pages, + rename_page, + update_page_content, +) +from .search import search_content +from .space import create_space, get_space, get_space_hierarchy, list_spaces + +__all__ = [ + # Attachment + "get_attachments_for_page", + "list_attachments", + "get_attachment", + # Page + "create_page", + "get_pages_by_id", + "get_page", + "list_pages", + "rename_page", + "update_page_content", + # Search + "search_content", + # Space + "create_space", + "get_space", + "get_space_hierarchy", + "list_spaces", +] \ No newline at end of file diff --git a/mcp_servers/confluence/tools/attachment.py b/mcp_servers/confluence/tools/attachment.py new file mode 100644 index 00000000..cb35c009 --- /dev/null +++ b/mcp_servers/confluence/tools/attachment.py @@ -0,0 +1,75 @@ +from typing import Annotated + +from client import ConfluenceClientV2 +from enums import AttachmentSortOrder +from utils import remove_none_values + + +async def get_attachments_for_page( + page_identifier: Annotated[str, "The ID or title of the page to get attachments for"], + limit: Annotated[ + int, "The maximum number of attachments to return. Defaults to 25. Max is 250" + ] = 25, + pagination_token: Annotated[ + str | None, + "The pagination token to use for the next page of results", + ] = None, +) -> Annotated[dict, "The attachments"]: + """Get attachments for a page by its ID or title. + + If a page title is provided, then the first page with an exact matching title will be returned. + """ + client = ConfluenceClientV2() + page_id = await client.get_page_id(page_identifier) + + params = remove_none_values({ + "limit": max(1, min(limit, 250)), + }) + + # Only add cursor parameter if pagination_token has a value + if pagination_token: + params["cursor"] = pagination_token + attachments = await client.get(f"pages/{page_id}/attachments", params=params) + return client.transform_get_attachments_response(attachments) + + +async def list_attachments( + sort_order: Annotated[ + AttachmentSortOrder | None, + "The order of the attachments to sort by. Defaults to created-date-newest-to-oldest", + ] = None, + limit: Annotated[ + int, "The maximum number of attachments to return. Defaults to 25. Max is 250" + ] = 25, + pagination_token: Annotated[ + str | None, + "The pagination token to use for the next page of results", + ] = None, +) -> Annotated[dict, "The attachments"]: + """List attachments in a workspace""" + client = ConfluenceClientV2() + + # Handle sort_order - use default if None + if sort_order is None: + sort_order = AttachmentSortOrder.CREATED_DATE_DESCENDING + + params = remove_none_values({ + "sort": sort_order.to_api_value(), + "limit": max(1, min(limit, 250)), + }) + + # Only add cursor parameter if pagination_token has a value + if pagination_token: + params["cursor"] = pagination_token + attachments = await client.get("attachments", params=params) + return client.transform_get_attachments_response(attachments) + + +async def get_attachment( + attachment_id: Annotated[str, "The ID of the attachment to get"], +) -> Annotated[dict, "The attachment"]: + """Get a specific attachment by its ID""" + client = ConfluenceClientV2() + + response = await client.get(f"attachments/{attachment_id}") + return client.transform_attachment_response(response) \ No newline at end of file diff --git a/mcp_servers/confluence/tools/page.py b/mcp_servers/confluence/tools/page.py new file mode 100644 index 00000000..9219cc9d --- /dev/null +++ b/mcp_servers/confluence/tools/page.py @@ -0,0 +1,208 @@ +from typing import Annotated + +from client import ConfluenceClientV2 +from enums import BodyFormat, PageSortOrder, PageUpdateMode +from errors import ToolExecutionError +from utils import remove_none_values, validate_ids + + +async def create_page( + space_identifier: Annotated[str, "The ID or title of the space to create the page in"], + title: Annotated[str, "The title of the page"], + content: Annotated[str, "The content of the page. Only plain text is supported"], + parent_id: Annotated[ + str | None, + "The ID of the parent. If not provided, the page will be created at the root of the space.", + ] = None, + is_private: Annotated[ + bool, + "If true, then only the user who creates this page will be able to see it. " + "Defaults to False", + ] = False, + is_draft: Annotated[ + bool, + "If true, then the page will be created as a draft. Defaults to False", + ] = False, +) -> Annotated[dict, "The page"]: + """Create a new page at the root of the given space.""" + client = ConfluenceClientV2() + space_id = await client.get_space_id(space_identifier) + + parent_id = parent_id or (await client.get_space_homepage(space_id)).get("id") + params = remove_none_values({ + "root-level": False, + "private": is_private, + }) + + body = remove_none_values({ + "spaceId": space_id, + "status": "draft" if is_draft else None, + "parentId": parent_id, + "title": title, + "body": { + "storage": { + "value": content, + "representation": BodyFormat.STORAGE.to_api_value(), + } + }, + }) + page = await client.post("pages", params=params, json=body) + return client.transform_page_response(page) + + +async def update_page_content( + page_identifier: Annotated[ + str, "The ID or title of the page to update. Numerical titles are NOT supported." + ], + content: Annotated[str, "The content of the page. Only plain text is supported"], + update_mode: Annotated[ + PageUpdateMode, + "The mode of update. Defaults to 'append'.", + ] = PageUpdateMode.APPEND, +) -> Annotated[dict, "The page"]: + """Update a page's content.""" + # Get the page to update + client = ConfluenceClientV2() + page_id = await client.get_page_id(page_identifier) + + page = await get_page(page_identifier) + if not page.get("page"): + raise ToolExecutionError(message=f"No page found with identifier: '{page_identifier}'") + status = page.get("page", {}).get("status", "current") + title = page.get("page", {}).get("title", "Untitled page") + body = page.get("page", {}).get("body", {}) + old_content = body.get(BodyFormat.STORAGE, {}).get("value", "") + old_version_number = page.get("page", {}).get("version", {}).get("number", 0) + + # Update the page content + payload = client.prepare_update_page_content_payload( + content=content, + update_mode=update_mode, + old_content=old_content, + page_id=page_id, + status=status, + title=title, + body_representation=BodyFormat.STORAGE, + old_version_number=old_version_number, + ) + updated_page = await client.put(f"pages/{page_id}", json=payload) + + return client.transform_page_response(updated_page) + + +async def rename_page( + page_identifier: Annotated[ + str, "The ID or title of the page to rename. Numerical titles are NOT supported." + ], + title: Annotated[str, "The title of the page"], +) -> Annotated[dict, "The page"]: + """Rename a page by changing its title.""" + # Get the page to rename + client = ConfluenceClientV2() + page_id = await client.get_page_id(page_identifier) + + page = await get_page(page_identifier) + if not page.get("page"): + raise ToolExecutionError(message=f"No page found with identifier: '{page_identifier}'") + status = page.get("page", {}).get("status", "current") + content = page.get("page", {}).get("body", {}).get(BodyFormat.STORAGE, {}).get("value", "") + old_version_number = page.get("page", {}).get("version", {}).get("number", 0) + + # Rename the page + payload = client.prepare_update_page_payload( + page_id=page_id, + status=status, + title=title, + body_representation=BodyFormat.STORAGE, + body_value=content, + version_number=old_version_number + 1, + version_message="Rename the page", + ) + updated_page = await client.put(f"pages/{page_id}", json=payload) + + return client.transform_page_response(updated_page) + + +async def get_page( + page_identifier: Annotated[ + str, "Can be a page's ID or title. Numerical titles are NOT supported." + ], +) -> Annotated[dict, "The page"]: + """Retrieve a SINGLE page's content by its ID or title. + + If a title is provided, then the first page with an exact matching title will be returned. + + IMPORTANT: For retrieving MULTIPLE pages, use `get_pages_by_id` instead + for a massive performance and efficiency boost. If you call this function multiple times + instead of using `get_pages_by_id`, then the universe will explode. + """ + client = ConfluenceClientV2() + if page_identifier.isdigit(): + return await client.get_page_by_id(page_identifier, BodyFormat.STORAGE) + else: + return await client.get_page_by_title(page_identifier, BodyFormat.STORAGE) + + +async def get_pages_by_id( + page_ids: Annotated[ + list[str], + "The IDs of the pages to get. IDs are numeric. Titles of pages are NOT supported. " + "Maximum of 250 page ids supported.", + ], +) -> Annotated[dict, "The pages"]: + """Get the content of MULTIPLE pages by their ID in a single efficient request. + + IMPORTANT: Always use this function when you need to retrieve content from more than one page, + rather than making multiple separate calls to get_page, because this function is significantly + more efficient than calling get_page multiple times. + """ + validate_ids(page_ids, max_length=250) + client = ConfluenceClientV2() + pages = await client.get( + "pages", params={"id": page_ids, "body-format": BodyFormat.STORAGE.to_api_value()} + ) + return client.transform_get_multiple_pages_response(pages) + + +async def list_pages( + space_ids: Annotated[ + list[str] | None, + "Restrict the response to only include pages in these spaces. " + "Only space IDs are supported. Titles of spaces are NOT supported. " + "If not provided, then no restriction is applied. " + "Maximum of 100 space ids supported.", + ] = None, + sort_by: Annotated[ + PageSortOrder | None, + "The order of the pages to sort by. Defaults to created-date-newest-to-oldest", + ] = None, + limit: Annotated[int, "The maximum number of pages to return. Defaults to 25. Max is 250"] = 25, + pagination_token: Annotated[ + str | None, + "The pagination token to use for the next page of results", + ] = None, +) -> Annotated[dict, "The pages"]: + """Get the content of multiple pages by their ID""" + validate_ids(space_ids, max_length=100) + limit = max(1, min(limit, 250)) + client = ConfluenceClientV2() + + # Handle sort_by - use default if None + if sort_by is None: + sort_by = PageSortOrder.CREATED_DATE_DESCENDING + + params = remove_none_values({ + "space-id": space_ids, + "sort": sort_by.to_api_value(), + "body-format": BodyFormat.STORAGE.to_api_value(), + "limit": limit, + }) + + # Only add cursor parameter if pagination_token has a value + if pagination_token: + params["cursor"] = pagination_token + pages = await client.get("pages", params=params) + return client.transform_list_pages_response(pages) + + + \ No newline at end of file diff --git a/mcp_servers/confluence/tools/search.py b/mcp_servers/confluence/tools/search.py new file mode 100644 index 00000000..7c6b6293 --- /dev/null +++ b/mcp_servers/confluence/tools/search.py @@ -0,0 +1,44 @@ +from typing import Annotated + +from client import ConfluenceClientV1 + + +async def search_content( + must_contain_all: Annotated[ + list[str] | None, + "Words/phrases that content MUST contain (AND logic). Each item can be:\n" + "- Single word: 'banana' - content must contain this word\n" + "- Multi-word phrase: 'How to' - content must contain all these words (in any order)\n" + "- All items in this list must be present for content to match\n" + "- Example: ['banana', 'apple'] finds content containing BOTH 'banana' AND 'apple'", + ] = None, + can_contain_any: Annotated[ + list[str] | None, + "Words/phrases where content can contain ANY of these (OR logic). Each item can be:\n" + "- Single word: 'project' - content containing this word will match\n" + "- Multi-word phrase: 'pen & paper' - content containing all these words will match\n" + "- Content matching ANY item in this list will be included\n" + "- Example: ['project', 'documentation'] finds content with 'project' OR 'documentation'", + ] = None, + enable_fuzzy: Annotated[ + bool, + "Enable fuzzy matching to find similar terms (e.g. 'roam' will find 'foam'). " + "Defaults to True", + ] = True, + limit: Annotated[int, "Maximum number of results to return (1-100). Defaults to 25"] = 25, +) -> Annotated[dict, "Search results containing content items matching the criteria"]: + """Search for content in Confluence. + + The search is performed across all content in the authenticated user's Confluence workspace. + All search terms in Confluence are case insensitive. + + You can use the parameters in different ways: + - must_contain_all: For AND logic - content must contain ALL of these + - can_contain_any: For OR logic - content can contain ANY of these + - Combine them: must_contain_all=['banana'] AND can_contain_any=['database', 'guide'] + """ + client = ConfluenceClientV1() + cql = client.construct_cql(must_contain_all, can_contain_any, enable_fuzzy) + response = await client.get("search", params={"cql": cql, "limit": max(1, min(limit, 100))}) + + return client.transform_search_content_response(response) \ No newline at end of file diff --git a/mcp_servers/confluence/tools/space.py b/mcp_servers/confluence/tools/space.py new file mode 100644 index 00000000..36ca99e4 --- /dev/null +++ b/mcp_servers/confluence/tools/space.py @@ -0,0 +1,163 @@ +import re +from typing import Annotated + +from client import ConfluenceClientV1 +from client import ConfluenceClientV2 +from errors import ToolExecutionError + + +async def create_space( + name: Annotated[str, "The name of the space"], + key: Annotated[str | None, "The key of the space. If not provided, one will be generated automatically"] = None, + description: Annotated[str | None, "The description of the space"] = None, + is_private: Annotated[bool, "If true, the space will be private to the creator. Defaults to False"] = False, +) -> Annotated[dict, "The created space"]: + """Create a new space in Confluence. + + Creates a new space with the specified name, key, and description. + The space can be either public (default) or private to the creator. + """ + # Use V2 API for space creation as it's the current standard + client = ConfluenceClientV2() + + # Prepare the space data according to v2 API format + space_data = { + "name": name, + } + + # Automatically generate a space key if one is not provided. A Confluence space key + # must be 1-255 characters long and only contain upper-case letters and numbers. + # We generate it by taking the first character of each word in the space name and + # falling back to the first 3 alphanumeric characters when the result is too short. + if key is None: + # Remove any character that is not a letter or number and split the words + words = re.findall(r"[A-Za-z0-9]+", name) + generated_key = "".join(word[0] for word in words).upper() + if len(generated_key) < 3: + generated_key = re.sub(r"[^A-Za-z0-9]", "", name.upper())[:3] + key = generated_key[:50] # keep it short but well within 255-char limit + + # Add key (generated or provided by the caller) + space_data["key"] = key + + # Add description if provided (v2 API format) + if description: + space_data["description"] = { + "value": description, + "representation": "plain" + } + + # Note: v2 API doesn't have a separate private space endpoint + # Private spaces are created through roleAssignments or permissions + if is_private: + # For private spaces, we'll create a regular space and note the limitation + # The user will need to set permissions manually or through the UI + pass + + # Create the space using v2 API endpoint + try: + response = await client.post("spaces", json=space_data) + except Exception as e: + # If the v2 API returns 403 Forbidden (which can happen on some tenants) + # fall back to the older v1 endpoint. + print(f"--- error: {e}, falling back to v1 endpoint") + if isinstance(e, ToolExecutionError) and "HTTP 403" in str(e): + client_v1 = ConfluenceClientV1() + v1_data = { + "key": key, + "name": name, + } + if description: + v1_data["description"] = { + "plain": { + "value": description, + "representation": "plain", + } + } + response = await client_v1.post("space", json=v1_data) + else: + raise + + # Transform the response to match our format + space_copy = response.copy() + + # Add URL if available from _links + if "_links" in space_copy and "webui" in space_copy["_links"]: + space_copy["url"] = space_copy["_links"]["webui"] + + # Clean up _links if present + if "_links" in space_copy: + del space_copy["_links"] + + return {"space": space_copy} + + +async def get_space( + space_identifier: Annotated[ + str, "Can be a space's ID or key. Numerical keys are NOT supported" + ], +) -> Annotated[dict, "The space"]: + """Get the details of a space by its ID or key.""" + client = ConfluenceClientV2() + if space_identifier.isdigit(): + return await client.get_space_by_id(space_identifier) + else: + return await client.get_space_by_key(space_identifier) + + +async def list_spaces( + limit: Annotated[ + int, "The maximum number of spaces to return. Defaults to 25. Max is 250" + ] = 25, + pagination_token: Annotated[ + str | None, "The pagination token to use for the next page of results" + ] = None, +) -> Annotated[dict, "The spaces"]: + """List all spaces sorted by name in ascending order.""" + client = ConfluenceClientV2() + params = {"limit": max(1, min(limit, 250)), "sort": "name"} + + # Only add cursor parameter if pagination_token has a value + if pagination_token: + params["cursor"] = pagination_token + spaces = await client.get("spaces", params=params) + return client.transform_get_spaces_response(spaces) + + +async def get_space_hierarchy( + space_identifier: Annotated[ + str, "Can be a space's ID or key. Numerical keys are NOT supported" + ], +) -> Annotated[dict, "The space hierarchy"]: + """Retrieve the full hierarchical structure of a Confluence space as a tree structure + + Only structural metadata is returned (not content). + The response is akin to the sidebar in the Confluence UI. + + Includes all pages, folders, whiteboards, databases, + smart links, etc. organized by parent-child relationships. + """ + client = ConfluenceClientV2() + + space = await client.get_space(space_identifier) + tree = client.create_space_tree(space) + + # Get root pages + root_pages = await client.get_root_pages_in_space(space["space"]["id"]) + tree["children"] = client.convert_root_pages_to_tree_nodes(root_pages["pages"]) + + if not tree["children"]: + return tree + + # Extract base URL for children URLs. The base URL is the space's URL. + root_page_url = tree["url"] + match = re.match(r"(.*?/spaces/[^/]+)", root_page_url) + children_base_url = match.group(1) if match else "" + + # Get descendants for each root page + await client.process_page_descendants(tree["children"], children_base_url) + + return tree + + + \ No newline at end of file diff --git a/mcp_servers/confluence/utils.py b/mcp_servers/confluence/utils.py new file mode 100644 index 00000000..05815dd7 --- /dev/null +++ b/mcp_servers/confluence/utils.py @@ -0,0 +1,90 @@ +import re + +from errors import ToolExecutionError, RetryableToolError + + +def remove_none_values(data: dict) -> dict: + """Remove all keys with None values from the dictionary.""" + return {k: v for k, v in data.items() if v is not None} + + +def validate_ids(ids: list[str] | None, max_length: int) -> None: + """Validate a list of IDs. The ids can be page ids, space ids, etc. + + A valid id is a string that is a number. + + Args: + ids: A list of IDs to validate. + max_length: Maximum number of IDs allowed. + + Returns: + None + + Raises: + ToolExecutionError: If any of the IDs are not valid. + RetryableToolError: If the number of IDs is greater than the max length. + """ + if not ids: + return + if len(ids) > max_length: + raise RetryableToolError( + message=f"The 'ids' parameter must have less than {max_length} items. Got {len(ids)}" + ) + if any(not id_.isdigit() for id_ in ids): + raise ToolExecutionError(message="Invalid ID provided. IDs are numeric") + + +def build_child_url(https://github.com/base_url:%20str,%20child:%20dict) -> str | None: + """Build URL for a child node based on its type and status. + + Args: + base_url: The base URL for the Confluence space + child: A dictionary representing a Confluence content item + + Returns: + The URL for the child, or None if it can't be determined + """ + if child["type"] in ("whiteboard", "database", "embed"): + return f"{base_url}/{child['type']}/{child['id']}" + elif child["type"] == "folder": + return None + elif child["type"] == "page": + parsed_title = re.sub(r"[ '\s]+", "+", child["title"].strip()) + if child.get("status") == "draft": + return f"{base_url}/{child['type']}s/edit-v2/{child['id']}" + else: + return f"{base_url}/{child['type']}s/{child['id']}/{parsed_title}" + return None + + +def build_hierarchy(transformed_children: list, parent_id: str, parent_node: dict) -> None: + """Build parent-child hierarchy from a flat list of descendants. + + This function takes a flat list of items that have parent_id references and + builds a hierarchical tree structure. It modifies the parent_node in place. + + Args: + transformed_children: List of child nodes with parent_id fields + parent_id: The ID of the parent node + parent_node: The parent node to attach direct children to + + Returns: + None (modifies parent_node in place) + """ + # Create a map of children by their ID for efficient lookups + child_map = {child["id"]: child for child in transformed_children} + + # Find all direct children of the given parent_id + direct_children = [] + for child in transformed_children: + if child.get("parent_id") == parent_id: + direct_children.append(child) + elif child.get("parent_id") in child_map: + # Add child to its parent's children list + parent = child_map[child.get("parent_id")] + if "children" not in parent: + parent["children"] = [] + parent["children"].append(child) + + # Set the direct children on the parent node + parent_node["children"] = direct_children \ No newline at end of file diff --git a/mcp_servers/discord/Dockerfile b/mcp_servers/discord/Dockerfile index 8ccdb3b7..2c4e9451 100644 --- a/mcp_servers/discord/Dockerfile +++ b/mcp_servers/discord/Dockerfile @@ -9,7 +9,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ # Copy only the requirements first to leverage Docker cache COPY mcp_servers/discord/requirements.txt . -COPY mcp_servers/discord/.env . +COPY mcp_servers/discord/.env.example .env RUN pip install --no-cache-dir -r requirements.txt # Copy the server code diff --git a/mcp_servers/discord/README.md b/mcp_servers/discord/README.md index 767e0c5d..5733c388 100644 --- a/mcp_servers/discord/README.md +++ b/mcp_servers/discord/README.md @@ -1,182 +1,71 @@ # Discord MCP Server -[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) -[![Python: 3.12+](https://img.shields.io/badge/Python-3.12+-blue.svg)](https://www.python.org/downloads/) -[![FastAPI](https://img.shields.io/badge/FastAPI-0.100.0+-00a393.svg)](https://fastapi.tiangolo.com/) -[![Discord API](https://img.shields.io/badge/Discord_API-v10-5865F2.svg)](https://discord.com/developers/docs/intro) +A Model Context Protocol (MCP) server for Discord integration. Send messages, manage channels, and interact with Discord servers using Discord's API with OAuth support. -## šŸ“– Overview +## šŸš€ Quick Start - Run in 30 Seconds -Discord MCP Server is a Model Context Protocol (MCP) implementation that bridges language models and other applications with Discord's API. It provides a standardized interface for executing Discord operations through various tools defined by the MCP standard. +### 🌐 Using Hosted Service (Recommended for Production) -## šŸš€ Features +Get instant access to Discord with our managed infrastructure - **no setup required**: -This server provides the following capabilities through MCP tools: - -| Tool | Description | -|------|-------------| -| `get_server_info` | Retrieve detailed information about a Discord server (guild) | -| `list_members` | List members of a server with customizable result limits | -| `create_text_channel` | Create a new text channel with optional category and topic | -| `send_message` | Send a message to a specified channel | -| `read_messages` | Retrieve recent messages from a channel | -| `add_reaction` | Add a single emoji reaction to a message | -| `add_multiple_reactions` | Add multiple emoji reactions to a message | -| `remove_reaction` | Remove a specific reaction from a message | -| `get_user_info` | Retrieve information about a specific Discord user | - -## šŸ”§ Prerequisites - -You'll need one of the following: - -- **Docker:** Docker installed and running (recommended) -- **Python:** Python 3.12+ with pip - -## āš™ļø Setup & Configuration - -### Discord Bot Setup - -1. **Create a Discord Bot**: - - Visit the [Discord Developer Portal](https://discord.com/developers/applications) - - Create a new application and add a bot user - - Under the "Bot" section, enable the following Privileged Gateway Intents: - - Server Members Intent - - Message Content Intent - - Copy your Bot Token - -2. **Invite the Bot**: - - Navigate to OAuth2 > URL Generator - - Select scopes: `bot` and `applications.commands` - - Select bot permissions: - - Read Messages/View Channels - - Send Messages - - Manage Messages (for reactions) - - Manage Channels (for channel creation) - - Use the generated URL to invite the bot to your server - -### Environment Configuration - -1. **Create your environment file**: - ```bash - cp .env.example .env - ``` - -2. **Edit the `.env` file** with your bot token: - ``` - DISCORD_TOKEN=YOUR_ACTUAL_DISCORD_BOT_TOKEN - DISCORD_MCP_SERVER_PORT=5000 - ``` - -## šŸƒā€ā™‚ļø Running the Server - -### Option 1: Docker (Recommended) - -The Docker build must be run from the project root directory (`klavis/`): +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** ```bash -# Navigate to the root directory of the project -cd /path/to/klavis - -# Build the Docker image -docker build -t discord-mcp-server -f mcp_servers/discord/Dockerfile . - -# Run the container -docker run -d -p 5000:5000 --name discord-mcp discord-mcp-server +pip install klavis +# or +npm install klavis ``` -To use your local .env file instead of building it into the image: +```python +from klavis import Klavis -```bash -docker run -d -p 5000:5000 --env-file mcp_servers/discord/.env --name discord-mcp discord-mcp-server +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("DISCORD", "user123") ``` -### Option 2: Python Virtual Environment +### 🐳 Using Docker (For Self-Hosting) ```bash -# Create and activate virtual environment -python -m venv venv -source venv/bin/activate # On Windows: venv\Scripts\activate +# Pull latest image +docker pull ghcr.io/klavis-ai/discord-mcp-server:latest -# Install dependencies -pip install -r requirements.txt -# Run the server -python server.py +# Run Discord MCP Server +docker run -p 5000:5000 -e DISCORD_TOKEN=$DISCORD_TOKEN \ + ghcr.io/klavis-ai/discord-mcp-server:latest ``` -Once running, the server will be accessible at `http://localhost:5000`. +## šŸ› ļø Available Tools -## šŸ”Œ API Usage +- **Message Management**: Send, edit, and delete messages +- **Channel Operations**: Manage channels and channel permissions +- **Server Management**: Get server information and member details +- **User Interactions**: Manage user roles and permissions +- **Bot Operations**: Handle bot-specific Discord functionality -The server implements the Model Context Protocol (MCP) standard. Here's an example of how to call a tool: +## šŸ“š Documentation & Support -```python -import httpx - -async def call_discord_tool(): - url = "http://localhost:5000/execute" - payload = { - "tool_name": "send_message", - "tool_args": { - "channel_id": "123456789012345678", - "content": "Hello from MCP!" - } - } - - async with httpx.AsyncClient() as client: - response = await client.post(url, json=payload) - result = response.json() - return result -``` - -## šŸ“‹ Common Operations - -### Getting Server Information - -```python -payload = { - "tool_name": "get_server_info", - "tool_args": { - "server_id": "YOUR_SERVER_ID" - } -} -``` - -### Sending a Message - -```python -payload = { - "tool_name": "send_message", - "tool_args": { - "channel_id": "YOUR_CHANNEL_ID", - "content": "Hello from Discord MCP Server!" - } -} -``` - -## šŸ› ļø Troubleshooting - -### Docker Build Issues - -- **File Not Found Errors**: If you see errors like `failed to compute cache key: failed to calculate checksum of ref: not found`, this means Docker can't find the files referenced in the Dockerfile. Make sure you're building from the root project directory (`klavis/`), not from the server directory. - -### Common Runtime Issues - -- **API Errors**: Check Discord API documentation for error meanings -- **Authentication Failures**: Verify your bot token is correct and hasn't expired -- **Missing Permissions**: Ensure your bot has the necessary permissions in the server -- **Intents Issues**: Confirm you've enabled the required intents in the Developer Portal +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | ## šŸ¤ Contributing -Contributions are welcome! Please feel free to submit a Pull Request. - -1. Fork the repository -2. Create your feature branch (`git checkout -b feature/amazing-feature`) -3. Commit your changes (`git commit -m 'Add some amazing feature'`) -4. Push to the branch (`git push origin feature/amazing-feature`) -5. Open a Pull Request +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. ## šŸ“œ License -This project is licensed under the MIT License - see the LICENSE file for details. \ No newline at end of file +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/discord/requirements.txt b/mcp_servers/discord/requirements.txt index 83ebade7..1fbaa306 100644 --- a/mcp_servers/discord/requirements.txt +++ b/mcp_servers/discord/requirements.txt @@ -1,6 +1,6 @@ aiohttp>=3.8.0 python-dotenv>=1.0.0 -mcp>=1.6.0 +mcp==1.11.0 httpx>=0.27.0 fastapi uvicorn[standard] diff --git a/mcp_servers/discord/server.py b/mcp_servers/discord/server.py index 109eaeba..5ee11284 100644 --- a/mcp_servers/discord/server.py +++ b/mcp_servers/discord/server.py @@ -1,17 +1,30 @@ import os import logging +import contextlib +from collections.abc import AsyncIterator from typing import Any, Dict, List, Optional, Annotated -import urllib.parse # Added for URL encoding emojis + +import click +import aiohttp +import urllib.parse from dotenv import load_dotenv -import aiohttp # Added for HTTP requests -from mcp.server.fastmcp import FastMCP +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send from pydantic import Field load_dotenv() +# Configure logging logging.basicConfig(level=logging.INFO) logger = logging.getLogger("discord-mcp-server") +# Discord API constants and configuration DISCORD_TOKEN = os.getenv("DISCORD_TOKEN") if not DISCORD_TOKEN: raise ValueError("DISCORD_TOKEN environment variable is required") @@ -19,8 +32,6 @@ DISCORD_API_BASE = "https://discord.com/api/v10" DISCORD_MCP_SERVER_PORT = int(os.getenv("DISCORD_MCP_SERVER_PORT", "5000")) -mcp = FastMCP("discord-server", port=DISCORD_MCP_SERVER_PORT) - # Helper function to create standard headers for Discord API calls def _get_discord_headers() -> Dict[str, str]: return { @@ -76,16 +87,7 @@ async def _make_discord_request(method: str, endpoint: str, json_data: Optional[ logger.error(f"An unexpected error occurred during Discord API request: {e}") raise RuntimeError(f"Unexpected error during API call to {method} {url}") from e - -@mcp.tool() -async def get_server_info( - server_id: Annotated[ - str, - Field( - description="The ID of the Discord server (guild) to retrieve information for." - ), - ] -) -> Dict[str, Any]: +async def get_server_info(server_id: str) -> Dict[str, Any]: """Get information about a Discord server (guild).""" logger.info(f"Executing tool: get_server_info with server_id: {server_id}") try: @@ -105,25 +107,10 @@ async def get_server_info( } return info except Exception as e: - logger.exception(f"Error executing tool get_server_info: {e}") - raise e + logger.exception(f"Error executing tool get_server_info: {e}") + raise e -@mcp.tool() -async def list_members( - server_id: Annotated[ - str, - Field( - description="The ID of the Discord server (guild)." - ), - ], - limit: Annotated[ - int, - Field( - description="The maximum number of members to return (1-1000).", - default=100 - ), - ] = 100 -) -> List[Dict[str, Any]]: +async def list_members(server_id: str, limit: int = 100) -> List[Dict[str, Any]]: """Get a list of members in a server (Default 100, Max 1000).""" logger.info(f"Executing tool: list_members with server_id: {server_id}, limit: {limit}") try: @@ -153,33 +140,7 @@ async def list_members( logger.exception(f"Error executing tool list_members: {e}") raise e -@mcp.tool() -async def create_text_channel( - server_id: Annotated[ - str, - Field( - description="The ID of the Discord server (guild) where the channel will be created." - ), - ], - name: Annotated[ - str, - Field( - description="The name for the new text channel." - ), - ], - category_id: Annotated[ - Optional[str], - Field( - description="The ID of the category (parent channel) to place the new channel under." - ), - ] = None, - topic: Annotated[ - Optional[str], - Field( - description="The topic for the new channel." - ), - ] = None -) -> Dict[str, Any]: +async def create_text_channel(server_id: str, name: str, category_id: Optional[str] = None, topic: Optional[str] = None) -> Dict[str, Any]: """Create a new text channel.""" logger.info(f"Executing tool: create_text_channel '{name}' in server {server_id}") try: @@ -204,27 +165,7 @@ async def create_text_channel( logger.exception(f"Error executing tool create_text_channel: {e}") raise e -@mcp.tool() -async def add_reaction( - channel_id: Annotated[ - str, - Field( - description="The ID of the channel containing the message." - ), - ], - message_id: Annotated[ - str, - Field( - description="The ID of the message to add the reaction to." - ), - ], - emoji: Annotated[ - str, - Field( - description="The emoji to add as a reaction. Can be a standard Unicode emoji or a custom emoji in the format `name:id`." - ), - ] -) -> str: +async def add_reaction(channel_id: str, message_id: str, emoji: str) -> str: """Add a reaction to a message.""" logger.info(f"Executing tool: add_reaction '{emoji}' to message {message_id} in channel {channel_id}") try: @@ -238,27 +179,7 @@ async def add_reaction( logger.exception(f"Error executing tool add_reaction: {e}") return f"Error adding reaction {emoji} to message {message_id}: {str(e)}" -@mcp.tool() -async def add_multiple_reactions( - channel_id: Annotated[ - str, - Field( - description="The ID of the channel containing the message." - ), - ], - message_id: Annotated[ - str, - Field( - description="The ID of the message to add reactions to." - ), - ], - emojis: Annotated[ - List[str], - Field( - description="A list of emojis to add. Each can be Unicode or custom format `name:id`." - ), - ] -) -> str: +async def add_multiple_reactions(channel_id: str, message_id: str, emojis: List[str]) -> str: """Add multiple reactions to a message (makes individual API calls).""" logger.info(f"Executing tool: add_multiple_reactions {emojis} to message {message_id} in channel {channel_id}") added_emojis = [] @@ -284,28 +205,7 @@ async def add_multiple_reactions( return result_text.strip() - -@mcp.tool() -async def remove_reaction( - channel_id: Annotated[ - str, - Field( - description="The ID of the channel containing the message." - ), - ], - message_id: Annotated[ - str, - Field( - description="The ID of the message to remove the reaction from." - ), - ], - emoji: Annotated[ - str, - Field( - description="The emoji reaction to remove. Can be Unicode or custom format `name:id`." - ), - ] -) -> str: +async def remove_reaction(channel_id: str, message_id: str, emoji: str) -> str: """Remove the bot's own reaction from a message.""" logger.info(f"Executing tool: remove_reaction '{emoji}' from message {message_id} in channel {channel_id}") try: @@ -320,21 +220,7 @@ async def remove_reaction( logger.exception(f"Error executing tool remove_reaction: {e}") return f"Error removing reaction {emoji} from message {message_id}: {str(e)}" -@mcp.tool() -async def send_message( - channel_id: Annotated[ - str, - Field( - description="The ID of the channel to send the message to." - ), - ], - content: Annotated[ - str, - Field( - description="The text content of the message." - ), - ] -) -> Dict[str, Any]: +async def send_message(channel_id: str, content: str) -> Dict[str, Any]: """Send a message to a specific channel.""" logger.info(f"Executing tool: send_message to channel {channel_id}") try: @@ -351,22 +237,7 @@ async def send_message( logger.exception(f"Error executing tool send_message: {e}") raise e -@mcp.tool() -async def read_messages( - channel_id: Annotated[ - str, - Field( - description="The ID of the channel to read messages from." - ), - ], - limit: Annotated[ - int, - Field( - description="The maximum number of messages to retrieve (1-100).", - default=50 - ), - ] = 50 -) -> List[Dict[str, Any]]: +async def read_messages(channel_id: str, limit: int = 50) -> List[Dict[str, Any]]: """Read recent messages from a channel (Default 50, Max 100).""" logger.info(f"Executing tool: read_messages from channel {channel_id}, limit: {limit}") try: @@ -410,15 +281,7 @@ async def read_messages( logger.exception(f"Error executing tool read_messages: {e}") raise e -@mcp.tool() -async def get_user_info( - user_id: Annotated[ - str, - Field( - description="The ID of the Discord user to retrieve information for." - ), - ] -) -> Dict[str, Any]: +async def get_user_info(user_id: str) -> Dict[str, Any]: """Get information about a Discord user.""" logger.info(f"Executing tool: get_user_info for user {user_id}") try: @@ -439,5 +302,419 @@ async def get_user_info( raise e +@click.command() +@click.option("--port", default=DISCORD_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("discord-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="discord_get_server_info", + description="Get information about a Discord server (guild).", + inputSchema={ + "type": "object", + "required": ["server_id"], + "properties": { + "server_id": { + "type": "string", + "description": "The ID of the Discord server (guild) to retrieve information for." + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "DISCORD_SERVER", "readOnlyHint": True} + ), + ), + types.Tool( + name="discord_list_members", + description="Get a list of members in a server (Default 100, Max 1000).", + inputSchema={ + "type": "object", + "required": ["server_id"], + "properties": { + "server_id": { + "type": "string", + "description": "The ID of the Discord server (guild)." + }, + "limit": { + "type": "integer", + "description": "The maximum number of members to return (1-1000).", + "default": 100 + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "DISCORD_SERVER", "readOnlyHint": True} + ), + ), + types.Tool( + name="discord_create_text_channel", + description="Create a new text channel.", + inputSchema={ + "type": "object", + "required": ["server_id", "name"], + "properties": { + "server_id": { + "type": "string", + "description": "The ID of the Discord server (guild) where the channel will be created." + }, + "name": { + "type": "string", + "description": "The name for the new text channel." + }, + "category_id": { + "type": "string", + "description": "The ID of the category (parent channel) to place the new channel under." + }, + "topic": { + "type": "string", + "description": "The topic for the new channel." + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "DISCORD_SERVER"} + ), + ), + types.Tool( + name="discord_add_reaction", + description="Add a reaction to a message.", + inputSchema={ + "type": "object", + "required": ["channel_id", "message_id", "emoji"], + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the channel containing the message." + }, + "message_id": { + "type": "string", + "description": "The ID of the message to add the reaction to." + }, + "emoji": { + "type": "string", + "description": "The emoji to add as a reaction. Can be a standard Unicode emoji or a custom emoji in the format `name:id`." + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "DISCORD_MESSAGE"} + ), + ), + types.Tool( + name="discord_add_multiple_reactions", + description="Add multiple reactions to a message (makes individual API calls).", + inputSchema={ + "type": "object", + "required": ["channel_id", "message_id", "emojis"], + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the channel containing the message." + }, + "message_id": { + "type": "string", + "description": "The ID of the message to add reactions to." + }, + "emojis": { + "type": "array", + "items": { + "type": "string" + }, + "description": "A list of emojis to add. Each can be Unicode or custom format `name:id`." + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "DISCORD_REACTION"} + ), + ), + types.Tool( + name="discord_remove_reaction", + description="Remove the bot's own reaction from a message.", + inputSchema={ + "type": "object", + "required": ["channel_id", "message_id", "emoji"], + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the channel containing the message." + }, + "message_id": { + "type": "string", + "description": "The ID of the message to remove the reaction from." + }, + "emoji": { + "type": "string", + "description": "The emoji reaction to remove. Can be Unicode or custom format `name:id`." + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "DISCORD_REACTION"} + ), + ), + types.Tool( + name="discord_send_message", + description="Send a message to a specific channel.", + inputSchema={ + "type": "object", + "required": ["channel_id", "content"], + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the channel to send the message to." + }, + "content": { + "type": "string", + "description": "The text content of the message." + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "DISCORD_MESSAGE"} + ), + ), + types.Tool( + name="discord_read_messages", + description="Read recent messages from a channel (Default 50, Max 100).", + inputSchema={ + "type": "object", + "required": ["channel_id"], + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the channel to read messages from." + }, + "limit": { + "type": "integer", + "description": "The maximum number of messages to retrieve (1-100).", + "default": 50 + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "DISCORD_MESSAGE", "readOnlyHint": True} + ), + ), + types.Tool( + name="discord_get_user_info", + description="Get information about a Discord user.", + inputSchema={ + "type": "object", + "required": ["user_id"], + "properties": { + "user_id": { + "type": "string", + "description": "The ID of the Discord user to retrieve information for." + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "DISCORD_USER", "readOnlyHint": True} + ), + ) + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + ctx = app.request_context + + if name == "discord_get_server_info": + server_id = arguments.get("server_id") + result = await get_server_info(server_id) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + + elif name == "discord_list_members": + server_id = arguments.get("server_id") + limit = arguments.get("limit", 100) + result = await list_members(server_id, limit) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + + elif name == "discord_create_text_channel": + server_id = arguments.get("server_id") + name = arguments.get("name") + category_id = arguments.get("category_id") + topic = arguments.get("topic") + result = await create_text_channel(server_id, name, category_id, topic) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + + elif name == "discord_add_reaction": + channel_id = arguments.get("channel_id") + message_id = arguments.get("message_id") + emoji = arguments.get("emoji") + result = await add_reaction(channel_id, message_id, emoji) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + + elif name == "discord_add_multiple_reactions": + channel_id = arguments.get("channel_id") + message_id = arguments.get("message_id") + emojis = arguments.get("emojis") + result = await add_multiple_reactions(channel_id, message_id, emojis) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + + elif name == "discord_remove_reaction": + channel_id = arguments.get("channel_id") + message_id = arguments.get("message_id") + emoji = arguments.get("emoji") + result = await remove_reaction(channel_id, message_id, emoji) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + + elif name == "discord_send_message": + channel_id = arguments.get("channel_id") + content = arguments.get("content") + result = await send_message(channel_id, content) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + + elif name == "discord_read_messages": + channel_id = arguments.get("channel_id") + limit = arguments.get("limit", 50) + result = await read_messages(channel_id, limit) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + + elif name == "discord_get_user_info": + user_id = arguments.get("user_id") + result = await get_user_info(user_id) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + await session_manager.handle_request(scope, receive, send) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + if __name__ == "__main__": - mcp.run(transport="sse") + main() diff --git a/mcp_servers/dropbox/.env.example b/mcp_servers/dropbox/.env.example new file mode 100644 index 00000000..8467109e --- /dev/null +++ b/mcp_servers/dropbox/.env.example @@ -0,0 +1,7 @@ +# Dropbox MCP Server Configuration + +# Server Port Configuration +PORT=5000 + +# Dropbox access token, get it from your Dropbox App settings +DROPBOX_ACCESS_TOKEN=your-api-key-here \ No newline at end of file diff --git a/mcp_servers/dropbox/Dockerfile b/mcp_servers/dropbox/Dockerfile new file mode 100644 index 00000000..56bdad4f --- /dev/null +++ b/mcp_servers/dropbox/Dockerfile @@ -0,0 +1,35 @@ +# Use a Node.js image as the base for building the application +FROM node:22-alpine AS builder + +# Set the working directory inside the container +WORKDIR /app + +# Copy package.json and package-lock.json to install dependencies +COPY mcp_servers/dropbox/package.json mcp_servers/dropbox/package-lock.json ./ + +# Install dependencies (ignoring scripts to prevent running the prepare script) +RUN npm install --ignore-scripts + +# Copy the rest of the application source code +COPY mcp_servers/dropbox . + +# Build the application using TypeScript +RUN npm run build + +# Use a smaller Node.js image for the final image +FROM node:22-slim AS release + +# Set the working directory inside the container +WORKDIR /app + +# Copy the built application from the builder stage +COPY --from=builder /app/build /app/build +COPY --from=builder /app/package.json /app/package.json +COPY --from=builder /app/package-lock.json /app/package-lock.json +# Install only production dependencies +RUN npm ci --omit=dev --ignore-scripts + +EXPOSE 5000 + +# Specify the command to run the application +ENTRYPOINT ["node", "build/src/index.js"] diff --git a/mcp_servers/dropbox/README.md b/mcp_servers/dropbox/README.md new file mode 100644 index 00000000..57c416ab --- /dev/null +++ b/mcp_servers/dropbox/README.md @@ -0,0 +1,78 @@ +# Dropbox MCP Server + +A Model Context Protocol (MCP) server for Dropbox integration. Manage files, folders, and sharing using Dropbox's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Dropbox with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("DROPBOX", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/dropbox-mcp-server:latest + + +# Run Dropbox MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/dropbox-mcp-server:latest + + +# Run Dropbox MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_dropbox_access_token_here"}' \ + ghcr.io/klavis-ai/dropbox-mcp-server:latest +``` + +**OAuth Setup:** Dropbox requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **File Management**: Upload, download, and manage Dropbox files +- **Folder Operations**: Create, move, and organize folders +- **Sharing**: Create and manage shared links and folder permissions +- **Search**: Search files and folders by name and content +- **Metadata**: Access file metadata and version history + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/dropbox/package-lock.json b/mcp_servers/dropbox/package-lock.json new file mode 100644 index 00000000..25c48418 --- /dev/null +++ b/mcp_servers/dropbox/package-lock.json @@ -0,0 +1,1602 @@ +{ + "name": "@klavis-ai/dropbox-mcp-server", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@klavis-ai/dropbox-mcp-server", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.12.1", + "@types/mime-types": "^3.0.1", + "dotenv": "^17.2.0", + "dropbox": "^10.34.0", + "express": "^5.1.0", + "get-uri": "^6.0.5", + "mime-types": "^3.0.1", + "zod": "^3.22.4", + "zod-to-json-schema": "^3.22.1" + }, + "bin": { + "dropbox-mcp": "build/index.js" + }, + "devDependencies": { + "@types/express": "^5.0.0", + "@types/node": "^20.19.9", + "ts-node": "^10.9.2", + "typescript": "^5.3.3" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz", + "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.13.2.tgz", + "integrity": "sha512-Vx7qOcmoKkR3qhaQ9qf3GxiVKCEu+zfJddHv6x3dY/9P6+uIwJnmuAur5aB+4FDXf41rRrDnOEGkviX5oYZ67w==", + "license": "MIT", + "dependencies": { + "ajv": "^6.12.6", + "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "express": "^5.0.1", + "express-rate-limit": "^7.5.0", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.23.8", + "zod-to-json-schema": "^3.24.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", + "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/body-parser": { + "version": "1.19.6", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz", + "integrity": "sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/express": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.3.tgz", + "integrity": "sha512-wGA0NX93b19/dZC1J18tKWVIYWyyF2ZjT9vin/NRu0qzzvfVzWjs04iq2rQ3H65vCTQYlRqs3YHfY7zjdV+9Kw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^5.0.0", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.6.tgz", + "integrity": "sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.5.tgz", + "integrity": "sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRMsfuQbnRq1Ef+C+RKaENOxXX87Ygl38W1vDfPHRku02TgQr+Qd8iivLtAMcR0KF5/29xlnFihkTlbqFrGOVQ==", + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "20.19.9", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.9.tgz", + "integrity": "sha512-cuVNgarYWZqxRJDQHEB58GEONhOK79QVR/qYx4S7kcUObQvUwvFnYxJuuHUKm2aieN9X3yZB4LZsuYNU1Qphsw==", + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@types/node-fetch": { + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.12.tgz", + "integrity": "sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==", + "license": "MIT", + "peer": true, + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.0" + } + }, + "node_modules/@types/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/send": { + "version": "0.17.5", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.5.tgz", + "integrity": "sha512-z6F2D3cOStZvuk2SaP6YrwkNO65iTZcwA2ZkSABegdkAh/lf+Aa/YQndZVfmEXT5vgAp6zv06VQ3ejSVjAny4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.8.tgz", + "integrity": "sha512-roei0UY3LhpOJvjbIP6ZZFngyLKl5dskOtDhxY5THRSpO+ZI+nzJ+m5yUMzGrp89YRa7lvknKkMYjqQFGwA7Sg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "license": "MIT" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT", + "peer": true + }, + "node_modules/basic-ftp": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/basic-ftp/-/basic-ftp-5.0.5.tgz", + "integrity": "sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "peer": true, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/data-uri-to-buffer": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.2.tgz", + "integrity": "sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/dotenv": { + "version": "17.2.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.0.tgz", + "integrity": "sha512-Q4sgBT60gzd0BB0lSyYD3xM4YxrXA9y4uBDof1JNYGzOXrQdQ6yX+7XIAqoFOGQFOTK1D3Hts5OllpxMDZFONQ==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/dropbox": { + "version": "10.34.0", + "resolved": "https://registry.npmjs.org/dropbox/-/dropbox-10.34.0.tgz", + "integrity": "sha512-5jb5/XzU0fSnq36/hEpwT5/QIep7MgqKuxghEG44xCu7HruOAjPdOb3x0geXv5O/hd0nHpQpWO+r5MjYTpMvJg==", + "license": "MIT", + "dependencies": { + "node-fetch": "^2.6.1" + }, + "engines": { + "node": ">=0.10.3" + }, + "peerDependencies": { + "@types/node-fetch": "^2.5.7" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", + "peer": true, + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventsource": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", + "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", + "license": "MIT", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.3.tgz", + "integrity": "sha512-nVpZkTMM9rF6AQ9gPJpFsNAMt48wIzB5TQgiTLdHiuO8XEDhUgZEhqKlZWXbIzo9VmJ/HvysHqEaVeD5v9TPvA==", + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/express": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "license": "MIT", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.0", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.1.tgz", + "integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==", + "license": "MIT", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": ">= 4.11" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, + "node_modules/finalhandler": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/form-data": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", + "peer": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/form-data/node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/form-data/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "peer": true, + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-uri": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/get-uri/-/get-uri-6.0.5.tgz", + "integrity": "sha512-b1O07XYq8eRuVzBNgJLstU6FYc1tS6wnMtF1I1D9lE8LxZSOGZ7LhxN54yPP6mGw5f2CkXY2BQUL9Fx41qvcIg==", + "license": "MIT", + "dependencies": { + "basic-ftp": "^5.0.2", + "data-uri-to-buffer": "^6.0.2", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "peer": true, + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-errors/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-to-regexp": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "license": "MIT", + "engines": { + "node": ">=16" + } + }, + "node_modules/pkce-challenge": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", + "license": "MIT", + "engines": { + "node": ">=16.20.0" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", + "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.6.3", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + "path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/serve-static": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" + }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "license": "MIT" + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "license": "MIT" + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/zod": { + "version": "3.25.67", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.67.tgz", + "integrity": "sha512-idA2YXwpCdqUSKRCACDE6ItZD9TZzy3OZMtpfLoh6oPR47lipysRrJfjzMqFxQ3uJuUPyUeWe1r9vLH33xO/Qw==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.24.6", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz", + "integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.24.1" + } + } + } +} diff --git a/mcp_servers/dropbox/package.json b/mcp_servers/dropbox/package.json new file mode 100644 index 00000000..cb34dbb1 --- /dev/null +++ b/mcp_servers/dropbox/package.json @@ -0,0 +1,42 @@ +{ + "name": "@klavis-ai/dropbox-mcp-server", + "version": "1.0.0", + "description": "Dropbox MCP server", + "type": "module", + "main": "build/index.js", + "bin": { + "dropbox-mcp": "./build/index.js" + }, + "scripts": { + "build": "tsc", + "start": "node build/src/index.js", + "prepare": "npm run build", + "prepublishOnly": "npm run build" + }, + "files": [ + "dist", + "README.md" + ], + "author": "Klavis AI", + "license": "MIT", + "engines": { + "node": ">=20.0.0" + }, + "dependencies": { + "@modelcontextprotocol/sdk": "^1.12.1", + "@types/mime-types": "^3.0.1", + "dotenv": "^17.2.0", + "dropbox": "^10.34.0", + "express": "^5.1.0", + "get-uri": "^6.0.5", + "mime-types": "^3.0.1", + "zod": "^3.22.4", + "zod-to-json-schema": "^3.22.1" + }, + "devDependencies": { + "@types/express": "^5.0.0", + "@types/node": "^20.19.9", + "ts-node": "^10.9.2", + "typescript": "^5.3.3" + } +} diff --git a/mcp_servers/dropbox/src/error.ts b/mcp_servers/dropbox/src/error.ts new file mode 100644 index 00000000..95fd1cc6 --- /dev/null +++ b/mcp_servers/dropbox/src/error.ts @@ -0,0 +1,38 @@ +/** + * Custom error class for the Dropbox MCP module + */ +export class DropboxMCPError extends Error { + public type: string; + public errorModule: string; + public status: string; + public error_summary : string; + + constructor(type: string, errorModule: string, message: string, status?: string) { + super(message); + this.name = 'DropboxMCPError'; + this.type = type; + this.errorModule = errorModule; + this.status = status || '400'; + this.error_summary = `${type} (${errorModule}): ${message}`; + } +} + +/** + * Error types + */ +export const ErrorTypes = { + DROPBOX_API_ERROR: 'DROPBOX_API_ERROR', + GET_URI_ERROR: 'GET_URI_ERROR', + OTHERS_ERROR: 'OTHERS_ERROR', + UNKNOWN_ERROR: 'UNKNOWN_ERROR' +} as const; + +/** + * Error modules + */ +export const ErrorModules = { + DROPBOX_SDK: 'dropbox-sdk', + GET_URI: 'get-uri', + OTHERS: 'others', + UNKNOWN: 'unknown' +} as const; diff --git a/mcp_servers/dropbox/src/handlers/account.ts b/mcp_servers/dropbox/src/handlers/account.ts new file mode 100644 index 00000000..b10ba470 --- /dev/null +++ b/mcp_servers/dropbox/src/handlers/account.ts @@ -0,0 +1,146 @@ +import { CallToolRequest, CallToolResult } from "@modelcontextprotocol/sdk/types.js"; +import * as schemas from "../schemas/index.js"; +import { getDropboxClient } from "../utils/context.js"; + +export async function handleGetCurrentAccount(args: any) { + schemas.GetCurrentAccountSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.usersGetCurrentAccount(); + const account = response.result as any; + + // Build comprehensive account information + let accountInfo = `=== Dropbox Account Information ===\n\n`; + + // Basic Information + accountInfo += `Account ID: ${account.account_id || 'Unknown'}\n`; + accountInfo += `Email: ${account.email || 'Unknown'}\n`; + accountInfo += `Email Verified: ${account.email_verified ? 'Yes' : 'No'}\n`; + accountInfo += `Account Disabled: ${account.disabled ? 'Yes' : 'No'}\n`; + accountInfo += `Locale: ${account.locale || 'Unknown'}\n`; + accountInfo += `Country: ${account.country || 'Unknown'}\n`; + + // Name Information + if (account.name) { + accountInfo += `\n--- Name Details ---\n`; + accountInfo += `Display Name: ${account.name.display_name || 'Unknown'}\n`; + accountInfo += `Given Name: ${account.name.given_name || 'Unknown'}\n`; + accountInfo += `Surname: ${account.name.surname || 'Unknown'}\n`; + accountInfo += `Familiar Name: ${account.name.familiar_name || 'Unknown'}\n`; + accountInfo += `Abbreviated Name: ${account.name.abbreviated_name || 'Unknown'}\n`; + } + + // Account Type + if (account.account_type) { + accountInfo += `\n--- Account Type ---\n`; + accountInfo += `Type: ${account.account_type['.tag'] || 'Unknown'}\n`; + } + + // Profile and Links + accountInfo += `\n--- Profile & Links ---\n`; + accountInfo += `Referral Link: ${account.referral_link || 'Not available'}\n`; + if (account.profile_photo_url) { + accountInfo += `Profile Photo: ${account.profile_photo_url}\n`; + } + + // Pairing Information + accountInfo += `\n--- Account Pairing ---\n`; + accountInfo += `Is Paired (has work account): ${account.is_paired ? 'Yes' : 'No'}\n`; + + // Root Information + if (account.root_info) { + accountInfo += `\n--- Root Information ---\n`; + accountInfo += `Root Type: ${account.root_info['.tag'] || 'Unknown'}\n`; + accountInfo += `Home Namespace ID: ${account.root_info.home_namespace_id || 'Unknown'}\n`; + accountInfo += `Root Namespace ID: ${account.root_info.root_namespace_id || 'Unknown'}\n`; + } + + // Team Information (if applicable) + if (account.team) { + accountInfo += `\n--- Team Information ---\n`; + accountInfo += `Team ID: ${account.team.id || 'Unknown'}\n`; + accountInfo += `Team Name: ${account.team.name || 'Unknown'}\n`; + if (account.team_member_id) { + accountInfo += `Team Member ID: ${account.team_member_id}\n`; + } + + // Team Policies + if (account.team.sharing_policies) { + accountInfo += `\n--- Team Sharing Policies ---\n`; + const policies = account.team.sharing_policies; + if (policies.shared_link_create_policy) { + accountInfo += `Shared Link Creation: ${policies.shared_link_create_policy['.tag'] || 'Unknown'}\n`; + } + if (policies.shared_folder_member_policy) { + accountInfo += `Shared Folder Member Policy: ${policies.shared_folder_member_policy['.tag'] || 'Unknown'}\n`; + } + if (policies.shared_folder_join_policy) { + accountInfo += `Shared Folder Join Policy: ${policies.shared_folder_join_policy['.tag'] || 'Unknown'}\n`; + } + if (policies.enforce_link_password_policy) { + accountInfo += `Link Password Policy: ${policies.enforce_link_password_policy['.tag'] || 'Unknown'}\n`; + } + if (policies.default_link_expiration_days_policy) { + accountInfo += `Default Link Expiration: ${policies.default_link_expiration_days_policy['.tag'] || 'Unknown'}\n`; + } + } + + if (account.team.office_addin_policy) { + accountInfo += `Office Add-in Policy: ${account.team.office_addin_policy['.tag'] || 'Unknown'}\n`; + } + + if (account.team.top_level_content_policy) { + accountInfo += `Top Level Content Policy: ${account.team.top_level_content_policy['.tag'] || 'Unknown'}\n`; + } + } + + return { + content: [ + { + type: "text", + text: accountInfo, + }, + ], + }; +} + +export async function handleGetSpaceUsage(args: any) { + schemas.GetSpaceUsageSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.usersGetSpaceUsage(); + const spaceInfo = response.result as any; + let info = `Used: ${spaceInfo.used} bytes`; + if (spaceInfo.allocation) { + if (spaceInfo.allocation['.tag'] === 'individual') { + info += `\nAllocated: ${spaceInfo.allocation.allocated} bytes`; + } else { + info += `\nAllocation Type: ${spaceInfo.allocation['.tag']}`; + } + } + + return { + content: [ + { + type: "text", + text: info, + }, + ], + }; +} + +/** + * Main handler for account operations + */ +export async function handleAccountOperation(request: CallToolRequest): Promise { + const { name, arguments: args } = request.params; + + switch (name) { + case "dropbox_get_current_account": + return await handleGetCurrentAccount(args) as CallToolResult; + case "dropbox_get_space_usage": + return await handleGetSpaceUsage(args) as CallToolResult; + default: + throw new Error(`Unknown account operation: ${name}`); + } +} diff --git a/mcp_servers/dropbox/src/handlers/batch-operations.ts b/mcp_servers/dropbox/src/handlers/batch-operations.ts new file mode 100644 index 00000000..8471c431 --- /dev/null +++ b/mcp_servers/dropbox/src/handlers/batch-operations.ts @@ -0,0 +1,462 @@ +import { CallToolRequest, CallToolResult } from "@modelcontextprotocol/sdk/types.js"; +import * as schemas from "../schemas/index.js"; +import { getDropboxClient } from "../utils/context.js"; + +export async function handleBatchDelete(args: any) { + const validatedArgs = schemas.BatchDeleteSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesDeleteBatch({ + entries: validatedArgs.entries, + }); + + const result = response.result as any; + + // Handle both sync and async responses + if (result['.tag'] === 'complete') { + const entries = result.entries || []; + const successful = entries.filter((entry: any) => entry['.tag'] === 'success').length; + const failed = entries.filter((entry: any) => entry['.tag'] === 'failure').length; + + let resultMessage = `Batch delete completed:\n`; + resultMessage += `Successful: ${successful}\n`; + resultMessage += `Failed: ${failed}`; + + if (failed > 0) { + const failureDetails = entries + .filter((entry: any) => entry['.tag'] === 'failure') + .map((entry: any) => ` - ${entry.failure?.reason || 'Unknown error'}`) + .join('\n'); + resultMessage += `\n\nFailure details:\n${failureDetails}`; + } + + return { + content: [ + { + type: "text", + text: resultMessage, + }, + ], + }; + } else if (result['.tag'] === 'async_job_id') { + return { + content: [ + { + type: "text", + text: `Batch delete started (async operation)\nJob ID: ${result.async_job_id}\n\nThe operation is processing in the background.\nUse 'check_batch_job_status' with this Job ID to monitor progress and get final results.`, + }, + ], + }; + } else { + return { + content: [ + { + type: "text", + text: `Batch delete initiated. Processing ${validatedArgs.entries.length} entries.`, + }, + ], + }; + } +} + +export async function handleBatchMove(args: any) { + const validatedArgs = schemas.BatchMoveSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesMoveBatchV2({ + entries: validatedArgs.entries, + autorename: validatedArgs.autorename, + allow_ownership_transfer: validatedArgs.allow_ownership_transfer, + }); + + const result = response.result as any; + + // Handle both sync and async responses + if (result['.tag'] === 'complete') { + const entries = result.entries || []; + const successful = entries.filter((entry: any) => entry['.tag'] === 'success').length; + const failed = entries.filter((entry: any) => entry['.tag'] === 'failure').length; + + let resultMessage = `Batch move completed:\n`; + resultMessage += `Successful: ${successful}\n`; + resultMessage += `Failed: ${failed}`; + + if (failed > 0) { + const failureDetails = entries + .filter((entry: any) => entry['.tag'] === 'failure') + .map((entry: any) => ` - ${entry.failure?.reason || 'Unknown error'}`) + .join('\n'); + resultMessage += `\n\nFailure details:\n${failureDetails}`; + } + + return { + content: [ + { + type: "text", + text: resultMessage, + }, + ], + }; + } else if (result['.tag'] === 'async_job_id') { + return { + content: [ + { + type: "text", + text: `Batch move started (async operation)\nJob ID: ${result.async_job_id}\nThe operation is processing in the background. Use the job ID to check status.`, + }, + ], + }; + } else { + return { + content: [ + { + type: "text", + text: `Batch move initiated. Processing ${validatedArgs.entries.length} entries.`, + }, + ], + }; + } +} + +export async function handleBatchCopy(args: any) { + const validatedArgs = schemas.BatchCopySchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesCopyBatchV2({ + entries: validatedArgs.entries, + }); + + const result = response.result as any; + + // Handle both sync and async responses + if (result['.tag'] === 'complete') { + const entries = result.entries || []; + const successful = entries.filter((entry: any) => entry['.tag'] === 'success').length; + const failed = entries.filter((entry: any) => entry['.tag'] === 'failure').length; + + let resultMessage = `Batch copy completed:\n`; + resultMessage += `Successful: ${successful}\n`; + resultMessage += `Failed: ${failed}`; + + if (failed > 0) { + const failureDetails = entries + .filter((entry: any) => entry['.tag'] === 'failure') + .map((entry: any) => ` - ${entry.failure?.reason || 'Unknown error'}`) + .join('\n'); + resultMessage += `\n\nFailure details:\n${failureDetails}`; + } + + return { + content: [ + { + type: "text", + text: resultMessage, + }, + ], + }; + } else if (result['.tag'] === 'async_job_id') { + return { + content: [ + { + type: "text", + text: `Batch copy started (async operation)\nJob ID: ${result.async_job_id}\n\nThe operation is processing in the background.\nNext Steps:\n1. Use 'check_batch_job_status' tool with this Job ID\n2. Monitor progress until completion\n3. The tool will show final results (successful / failed counts)\n\nTip: Large batches or many files typically trigger async processing.`, + }, + ], + }; + } else { + return { + content: [ + { + type: "text", + text: `Batch copy initiated. Processing ${validatedArgs.entries.length} entries.`, + }, + ], + }; + } +} + +export async function handleCheckBatchJobStatus(args: any) { + const validatedArgs = schemas.BatchJobStatusSchema.parse(args); + const dropbox = getDropboxClient(); + + // Try checking different types of batch operations + let statusResponse; + let operationType = "operation"; + + // First try copy batch check + try { + statusResponse = await dropbox.filesCopyBatchCheckV2({ + async_job_id: validatedArgs.async_job_id, + }); + operationType = "copy"; + } catch (copyError: any) { + // If copy check fails, try move batch check + try { + statusResponse = await dropbox.filesMoveBatchCheckV2({ + async_job_id: validatedArgs.async_job_id, + }); + operationType = "move"; + } catch (moveError: any) { + // If move check fails, try delete batch check + try { + statusResponse = await dropbox.filesDeleteBatchCheck({ + async_job_id: validatedArgs.async_job_id, + }); + operationType = "delete"; + } catch (deleteError: any) { + throw new Error(`Unable to check job status. Job ID may be invalid or expired: ${validatedArgs.async_job_id}`); + } + } + } + + const result = statusResponse.result as any; + + if (result['.tag'] === 'in_progress') { + return { + content: [ + { + type: "text", + text: `Batch ${operationType} operation is still in progress.\nJob ID: ${validatedArgs.async_job_id}\nStatus: Processing...`, + }, + ], + }; + } else if (result['.tag'] === 'complete') { + const entries = result.entries || []; + const successful = entries.filter((entry: any) => entry['.tag'] === 'success').length; + const failed = entries.filter((entry: any) => entry['.tag'] === 'failure').length; + + let resultMessage = `Batch ${operationType} operation completed!\n`; + resultMessage += `Job ID: ${validatedArgs.async_job_id}\n`; + resultMessage += `Successful: ${successful}\n`; + resultMessage += `Failed: ${failed}`; + + if (failed > 0) { + const failureDetails = entries + .filter((entry: any) => entry['.tag'] === 'failure') + .map((entry: any, index: number) => ` ${index + 1}. ${entry.failure?.reason || 'Unknown error'}`) + .join('\n'); + resultMessage += `\n\nFailure details:\n${failureDetails}`; + } + + return { + content: [ + { + type: "text", + text: resultMessage, + }, + ], + }; + } else if (result['.tag'] === 'failed') { + return { + content: [ + { + type: "text", + text: `Batch ${operationType} operation failed.\nJob ID: ${validatedArgs.async_job_id}\nError: ${result.reason || 'Unknown error'}`, + }, + ], + }; + } else { + return { + content: [ + { + type: "text", + text: `Batch ${operationType} operation status: ${result['.tag'] || 'Unknown'}\nJob ID: ${validatedArgs.async_job_id}`, + }, + ], + }; + } +} + +export async function handleLockFileBatch(args: any) { + const validatedArgs = schemas.LockFileBatchSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesLockFileBatch({ + entries: validatedArgs.entries, + }); + + const result = response.result as any; + + // Check if response has entries directly (sync response) + if (result.entries) { + const entries = result.entries || []; + const successful = entries.filter((entry: any) => entry['.tag'] === 'success').length; + const failed = entries.length - successful; + + let resultMessage = `File locking batch operation completed!\n\n`; + resultMessage += `Successfully locked: ${successful} file(s)\n`; + resultMessage += `Failed to lock: ${failed} file(s)`; + + if (failed > 0) { + const failureDetails = entries + .filter((entry: any) => entry['.tag'] === 'failure') + .map((entry: any, index: number) => ` ${index + 1}. ${entry.failure?.reason || 'Unknown error'}`) + .join('\n'); + resultMessage += `\n\nFailure details:\n${failureDetails}`; + } + + return { + content: [ + { + type: "text", + text: resultMessage, + }, + ], + }; + } + // Check if response is immediate or async (with .tag) + else if (result['.tag'] === 'complete') { + const entries = result.entries || []; + const successful = entries.filter((entry: any) => entry['.tag'] === 'success').length; + const failed = entries.length - successful; + + let resultMessage = `File locking batch operation completed!\n\n`; + resultMessage += `Successfully locked: ${successful} file(s)\n`; + resultMessage += `Failed to lock: ${failed} file(s)`; + + if (failed > 0) { + const failureDetails = entries + .filter((entry: any) => entry['.tag'] === 'failure') + .map((entry: any, index: number) => ` ${index + 1}. ${entry.failure?.reason || 'Unknown error'}`) + .join('\n'); + resultMessage += `\n\nFailure details:\n${failureDetails}`; + } + + return { + content: [ + { + type: "text", + text: resultMessage, + }, + ], + }; + } else if (result['.tag'] === 'async_job_id') { + const jobId = result.async_job_id; + return { + content: [ + { + type: "text", + text: `File locking batch operation started (large batch detected)\n\nJob ID: ${jobId}\n\nUse 'check_batch_job_status' with this job ID to monitor progress.`, + }, + ], + }; + } else { + return { + content: [ + { + type: "text", + text: `Unknown response from file locking operation: ${result['.tag'] || 'undefined'}\nFull response: ${JSON.stringify(result, null, 2)}`, + }, + ], + }; + } +} + +export async function handleUnlockFileBatch(args: any) { + const validatedArgs = schemas.UnlockFileBatchSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesUnlockFileBatch({ + entries: validatedArgs.entries, + }); + + const result = response.result as any; + + // Check if response has entries directly (sync response) + if (result.entries) { + const entries = result.entries || []; + const successful = entries.filter((entry: any) => entry['.tag'] === 'success').length; + const failed = entries.length - successful; + + let resultMessage = `File unlocking batch operation completed!\n\n`; + resultMessage += `Successfully unlocked: ${successful} file(s)\n`; + resultMessage += `Failed to unlock: ${failed} file(s)`; + + if (failed > 0) { + const failureDetails = entries + .filter((entry: any) => entry['.tag'] === 'failure') + .map((entry: any, index: number) => ` ${index + 1}. ${entry.failure?.reason || 'Unknown error'}`) + .join('\n'); + resultMessage += `\n\nFailure details:\n${failureDetails}`; + } + + return { + content: [ + { + type: "text", + text: resultMessage, + }, + ], + }; + } + // Check if response is immediate or async (with .tag) + else if (result['.tag'] === 'complete') { + const entries = result.entries || []; + const successful = entries.filter((entry: any) => entry['.tag'] === 'success').length; + const failed = entries.length - successful; + + let resultMessage = `File unlocking batch operation completed!\n\n`; + resultMessage += `Successfully unlocked: ${successful} file(s)\n`; + resultMessage += `Failed to unlock: ${failed} file(s)`; + + if (failed > 0) { + const failureDetails = entries + .filter((entry: any) => entry['.tag'] === 'failure') + .map((entry: any, index: number) => ` ${index + 1}. ${entry.failure?.reason || 'Unknown error'}`) + .join('\n'); + resultMessage += `\n\nFailure details:\n${failureDetails}`; + } + + return { + content: [ + { + type: "text", + text: resultMessage, + }, + ], + }; + } else if (result['.tag'] === 'async_job_id') { + const jobId = result.async_job_id; + return { + content: [ + { + type: "text", + text: `File unlocking batch operation started (large batch detected)\n\nJob ID: ${jobId}\n\nUse 'check_batch_job_status' with this job ID to monitor progress.`, + }, + ], + }; + } else { + return { + content: [ + { + type: "text", + text: `Unknown response from file unlocking operation: ${result['.tag'] || 'undefined'}\nFull response: ${JSON.stringify(result, null, 2)}`, + }, + ], + }; + } +} + +/** + * Main handler for batch operations + */ +export async function handleBatchOperation(request: CallToolRequest): Promise { + const { name, arguments: args } = request.params; + + switch (name) { + case "dropbox_batch_delete": + return await handleBatchDelete(args) as CallToolResult; + case "dropbox_batch_move": + return await handleBatchMove(args) as CallToolResult; + case "dropbox_batch_copy": + return await handleBatchCopy(args) as CallToolResult; + case "dropbox_check_batch_job_status": + return await handleCheckBatchJobStatus(args) as CallToolResult; + case "dropbox_lock_file_batch": + return await handleLockFileBatch(args) as CallToolResult; + case "dropbox_unlock_file_batch": + return await handleUnlockFileBatch(args) as CallToolResult; + default: + throw new Error(`Unknown batch operation: ${name}`); + } +} diff --git a/mcp_servers/dropbox/src/handlers/file-operations.ts b/mcp_servers/dropbox/src/handlers/file-operations.ts new file mode 100644 index 00000000..b50af56b --- /dev/null +++ b/mcp_servers/dropbox/src/handlers/file-operations.ts @@ -0,0 +1,482 @@ +import { getDropboxClient } from '../utils/context.js'; +import { + UploadFileSchema, + DownloadFileSchema, + GetThumbnailSchema, + GetTemporaryLinkSchema, + ListRevisionsSchema, + RestoreFileSchema, + SaveUrlSchema, + SaveUrlCheckJobStatusSchema +} from '../schemas/index.js'; +import { CallToolRequest, CallToolResult, ImageContent, AudioContent } from "@modelcontextprotocol/sdk/types.js"; +import type { ReadResourceResult } from "@modelcontextprotocol/sdk/types.js"; +import { getUri } from 'get-uri'; +import { lookup } from 'mime-types'; +import { + dropboxResourceUriToPath, + isFolderPath +} from '../utils/path-url-handling.js'; +import { DropboxMCPError, ErrorModules, ErrorTypes } from '../error.js'; +import { wrapGetUriError } from '../utils/error-msg.js'; + +/** + * Creates an ImageContent object for MCP responses + */ +function createImageContent(data: Buffer, mimeType: string): ImageContent { + return { + type: "image" as const, + data: data.toString('base64'), + mimeType: mimeType, + }; +} + +/** + * Create an AudioContent object for MCP responses + */ +function createAudioContent(data: Buffer, mimeType: string): AudioContent { + return { + type: "audio" as const, + data: data.toString('base64'), + mimeType: mimeType, + }; +} + +/** + * Detects file type and MIME type based on file extension using mime-types library + */ +export function detectFileType(fileName: string): { mimeType: string; contentType: 'image' | 'audio' | 'text' | 'binary' } { + // Use mime-types library to get MIME type from filename + const mimeType = lookup(fileName) || 'application/octet-stream'; + + // Determine content type category based on MIME type + if (mimeType.startsWith('image/')) { + return { mimeType, contentType: 'image' }; + } + + if (mimeType.startsWith('audio/')) { + return { mimeType, contentType: 'audio' }; + } + + if (mimeType.startsWith('text/') || + ['application/json', 'application/xml', 'application/javascript', 'application/typescript'].includes(mimeType)) { + return { mimeType, contentType: 'text' }; + } + + // Everything else is binary + return { mimeType, contentType: 'binary' }; +} + +export async function handleUploadFile(args: any) { + const validatedArgs = UploadFileSchema.parse(args); + const dropbox = getDropboxClient(); + + let source = validatedArgs.source_uri; + // No allow file:// URIs for security reasons + if (source.startsWith('file://')) { + throw new DropboxMCPError(ErrorTypes.OTHERS_ERROR, ErrorModules.OTHERS, + "File URIs are not allowed for security reasons. Please use http://, https://, ftp://, or data: URIs." + ); + } + const targetPath = validatedArgs.dropbox_path; + + // Get stream from URI using get-uri (supports http://, https://, ftp://, data:.) + let stream: NodeJS.ReadableStream; + + try { + stream = await getUri(source); + } catch (error) { + wrapGetUriError(error, source); + } + + // Use chunked upload for all files to avoid Node.js fetch duplex issues + // and handle both small and large files consistently + return await handleChunkedUpload(dropbox, stream, targetPath, validatedArgs, source); +} + +/** + * Handle large file uploads using Dropbox's chunked upload session + */ +async function handleChunkedUpload( + dropbox: any, + stream: NodeJS.ReadableStream, + targetPath: string, + validatedArgs: any, + source: string +) { + const CHUNK_SIZE = 4 * 1024 * 1024; // 4MB chunks + let sessionId: string | undefined; + let uploadedBytes = 0; + let buffer = Buffer.alloc(0); + + try { + // Start upload session + const startResponse = await dropbox.filesUploadSessionStart({ + close: false, + contents: Buffer.alloc(0), + }); + sessionId = startResponse.result.session_id; + + // Process stream in chunks + for await (const chunk of stream) { + buffer = Buffer.concat([buffer, Buffer.from(chunk)]); + + // If we have enough data for a chunk, upload it + while (buffer.length >= CHUNK_SIZE) { + const chunkToUpload = buffer.slice(0, CHUNK_SIZE); + buffer = buffer.slice(CHUNK_SIZE); + + await dropbox.filesUploadSessionAppendV2({ + cursor: { + session_id: sessionId, + offset: uploadedBytes, + }, + close: false, + contents: chunkToUpload, + }); + + uploadedBytes += chunkToUpload.length; + } + } + + // Upload any remaining data and finish the session + const finishResponse = await dropbox.filesUploadSessionFinish({ + cursor: { + session_id: sessionId, + offset: uploadedBytes, + }, + commit: { + path: targetPath, + mode: validatedArgs.mode as any, + autorename: validatedArgs.autorename, + mute: validatedArgs.mute, + }, + contents: buffer, // Upload remaining data + }); + + const totalSize = uploadedBytes + buffer.length; + + return { + content: [ + { + type: "text", + text: `File uploaded successfully (chunked upload)!\n\nSource URI: ${source}\nDropbox path: ${finishResponse.result.path_display}\nFile size: ${finishResponse.result.size} bytes (${(finishResponse.result.size / 1024 / 1024).toFixed(2)} MB)\nUploaded size: ${totalSize} bytes (${(totalSize / 1024 / 1024).toFixed(2)} MB)\nUpload mode: ${validatedArgs.mode}\nAutorename: ${validatedArgs.autorename}`, + }, + ], + }; + } catch (error) { + // If we started a session but failed, we should consider cleaning up + // but Dropbox sessions auto-expire, so it's not critical + throw error; + } +} + +export async function handleDownloadFile(args: any) { + const validatedArgs = DownloadFileSchema.parse(args); + const dropbox = getDropboxClient(); + const path = validatedArgs.path; + if (!isFolderPath(path)) { + const { mimeType, contentType } = detectFileType(path); + if (['image', 'audio', 'text'].includes(contentType)) { + const response = await dropbox.filesDownload({ + path: path, + }); + const result = response.result as any; + const fileBuffer = Buffer.from(result.fileBinary, 'binary'); + let content; + switch (contentType) { + case 'image': + content = createImageContent(fileBuffer, mimeType); + break; + case 'audio': + content = createAudioContent(fileBuffer, mimeType); + break; + case 'text': + content = { + type: "text", + text: fileBuffer.toString('utf8'), + } + } + return { + content: [content], + }; + } + } + return { + content: [ + { + type: "text", + text: `Use resources/read to access the fileContent at URI: dropbox://${path}`, + }, + ], + + }; +} + +export async function handleGetThumbnail(args: any) { + const validatedArgs = GetThumbnailSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesGetThumbnailV2({ + resource: { ".tag": "path", path: validatedArgs.path }, + format: { ".tag": validatedArgs.format }, + size: { ".tag": validatedArgs.size }, + }); + + const result = response.result as any; + + const thumbnailBuffer = Buffer.isBuffer(result.fileBinary) + ? result.fileBinary + : Buffer.from(result.fileBinary, 'binary'); + + const base64Thumbnail = thumbnailBuffer.toString('base64'); + const mimeType = `image/${validatedArgs.format}`; + + // Create image content using the helper function + const imageContent = createImageContent(base64Thumbnail, mimeType); + + return { + content: [imageContent], + }; +} + +export async function handleGetTemporaryLink(args: any) { + const validatedArgs = GetTemporaryLinkSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesGetTemporaryLink({ + path: validatedArgs.path, + }); + + return { + content: [ + { + type: "text", + text: `Temporary link: ${response.result.link}`, + }, + ], + }; +} + +export async function handleListRevisions(args: any) { + const validatedArgs = ListRevisionsSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesListRevisions({ + path: validatedArgs.path, + mode: validatedArgs.mode as any, + limit: validatedArgs.limit, + }); + + const revisions = (response.result as any).entries?.map((rev: any) => + `Revision ID: ${rev.rev} - Modified: ${rev.server_modified} - Size: ${rev.size} bytes` + ) || []; + + return { + content: [ + { + type: "text", + text: `Revisions for file "${validatedArgs.path}":\n\n${revisions.join('\n') || 'No revisions found'}`, + }, + ], + }; +} + +export async function handleRestoreFile(args: any) { + const validatedArgs = RestoreFileSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesRestore({ + path: validatedArgs.path, + rev: validatedArgs.rev, + }); + + return { + content: [ + { + type: "text", + text: `File restored to revision ${validatedArgs.rev}: ${(response.result as any).path_display}`, + }, + ], + }; +} + +export async function handleSaveUrl(args: any) { + const validatedArgs = SaveUrlSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesSaveUrl({ + path: validatedArgs.path, + url: validatedArgs.url, + }); + + if (response.result['.tag'] === 'complete') { + const result = response.result as any; + return { + content: [ + { + type: "text", + text: `URL saved successfully!\n\nFile: ${result.path_display || validatedArgs.path}\nSource URL: ${validatedArgs.url}\nSize: ${result.size || 'Unknown'} bytes`, + }, + ], + }; + } else if (response.result['.tag'] === 'async_job_id') { + return { + content: [ + { + type: "text", + text: `URL save started (async operation)\nJob ID: ${response.result.async_job_id}\n\nSource URL: ${validatedArgs.url}\nDestination: ${validatedArgs.path}\n\nUse 'save_url_check_job_status' with this Job ID to check progress.`, + }, + ], + }; + } else { + return { + content: [ + { + type: "text", + text: `URL save initiated for: ${validatedArgs.url} -> ${validatedArgs.path}`, + }, + ], + }; + } +} + +export async function handleSaveUrlCheckJobStatus(args: any) { + const validatedArgs = SaveUrlCheckJobStatusSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesSaveUrlCheckJobStatus({ + async_job_id: validatedArgs.async_job_id, + }); + + const result = response.result; + + if (result['.tag'] === 'in_progress') { + return { + content: [ + { + type: "text", + text: `URL save operation is still in progress.\nJob ID: ${validatedArgs.async_job_id}\nStatus: Processing...`, + }, + ], + }; + } else if (result['.tag'] === 'complete') { + const completeResult = result as any; + return { + content: [ + { + type: "text", + text: `URL save completed!\nJob ID: ${validatedArgs.async_job_id}\nFile: ${completeResult.path_display}\nSize: ${completeResult.size} bytes`, + }, + ], + }; + } else if (result['.tag'] === 'failed') { + const failedResult = result as any; + return { + content: [ + { + type: "text", + text: `URL save failed.\nJob ID: ${validatedArgs.async_job_id}\nError: ${failedResult.reason || 'Unknown error'}`, + }, + ], + }; + } else { + return { + content: [ + { + type: "text", + text: `URL save status: ${result['.tag'] || 'Unknown'}\nJob ID: ${validatedArgs.async_job_id}`, + }, + ], + }; + } +} + +export async function handleReadResource(uri: string): Promise { + if (!uri.startsWith('dropbox://')) { + throw new Error('Invalid resource URI. Must start with dropbox://'); + } + + const filePath = dropboxResourceUriToPath(uri); + + try { + const dropboxClient = getDropboxClient(); + const response = await dropboxClient.filesDownload({ + path: filePath, + }); + + const result = response.result as any; + let fileBuffer: Buffer | undefined; + + // Extract file content + if (result.fileBinary) { + if (Buffer.isBuffer(result.fileBinary)) { + fileBuffer = result.fileBinary; + } else { + // According to official examples, fileBinary should be treated as binary data + fileBuffer = Buffer.from(result.fileBinary, 'binary'); + } + } + + if (fileBuffer) { + const fileName = result.name || 'Unknown file'; + const { mimeType } = detectFileType(fileName); + + if (mimeType.startsWith('text/')) { + // Return text content directly + return { + contents: [ + { + uri: uri, + mimeType: mimeType, + text: fileBuffer.toString('utf8'), + }, + ], + }; + } else { + // Return binary content as base64 + return { + contents: [ + { + uri: uri, + mimeType: mimeType, + blob: fileBuffer.toString('base64'), + }, + ], + }; + } + } + + throw new Error('Failed to extract file content'); + } catch (error: any) { + throw new Error(`Failed to read resource: ${error.message}`); + } +} + +/** + * Main handler for file operations + */ +export async function handleFileOperation(request: CallToolRequest): Promise { + const { name, arguments: args } = request.params; + + switch (name) { + case "dropbox_upload_file": + return await handleUploadFile(args) as CallToolResult; + case "dropbox_download_file": + return await handleDownloadFile(args) as CallToolResult; + case "dropbox_get_thumbnail": + return await handleGetThumbnail(args) as CallToolResult; + case "dropbox_list_revisions": + return await handleListRevisions(args) as CallToolResult; + case "dropbox_restore_file": + return await handleRestoreFile(args) as CallToolResult; + case "dropbox_save_url": + return await handleSaveUrl(args) as CallToolResult; + case "dropbox_save_url_check_job_status": + return await handleSaveUrlCheckJobStatus(args) as CallToolResult; + case "dropbox_get_temporary_link": + return await handleGetTemporaryLink(args) as CallToolResult; + default: + throw new Error(`Unknown file operation: ${name}`); + } +} diff --git a/mcp_servers/dropbox/src/handlers/file-requests.ts b/mcp_servers/dropbox/src/handlers/file-requests.ts new file mode 100644 index 00000000..cb8d07bd --- /dev/null +++ b/mcp_servers/dropbox/src/handlers/file-requests.ts @@ -0,0 +1,165 @@ +import { CallToolRequest, CallToolResult } from "@modelcontextprotocol/sdk/types.js"; +import * as schemas from "../schemas/index.js"; +import { getDropboxClient } from "../utils/context.js"; + +/** + * Handle create file request operation + */ +async function handleCreateFileRequest(args: any): Promise { + const validatedArgs = schemas.CreateFileRequestSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.fileRequestsCreate({ + title: validatedArgs.title, + destination: validatedArgs.destination, + description: validatedArgs.description, + }); + + const fileRequest = response.result; + return { + content: [ + { + type: "text", + text: `File request created successfully!\nID: ${fileRequest.id}\nTitle: ${fileRequest.title}\nURL: ${fileRequest.url}\nDestination: ${fileRequest.destination}${fileRequest.description ? `\nDescription: ${fileRequest.description}` : ''}`, + }, + ], + }; +} + +/** + * Handle get file request operation + */ +async function handleGetFileRequest(args: any): Promise { + const validatedArgs = schemas.GetFileRequestSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.fileRequestsGet({ + id: validatedArgs.id, + }); + + const fileRequest = response.result; + let info = `ID: ${fileRequest.id}\nTitle: ${fileRequest.title}\nDestination: ${fileRequest.destination}\nFile Count: ${fileRequest.file_count}\nURL: ${fileRequest.url}`; + + if (fileRequest.deadline) { + info += `\nDeadline: ${fileRequest.deadline.deadline}`; + } + if (fileRequest.description) { + info += `\nDescription: ${fileRequest.description}`; + } + + return { + content: [ + { + type: "text", + text: info, + }, + ], + }; +} + +/** + * Handle list file requests operation + */ +async function handleListFileRequests(args: any): Promise { + schemas.ListFileRequestsSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.fileRequestsList(); + + const fileRequests = response.result.file_requests.map((request: any) => + `ID: ${request.id} - Title: ${request.title} - Destination: ${request.destination} - File Count: ${request.file_count} - Status: ${request.is_open ? 'Open' : 'Closed'}` + ); + + return { + content: [ + { + type: "text", + text: `File requests:\n\n${fileRequests.join('\n') || 'No file requests found'}`, + }, + ], + }; +} + +/** + * Handle delete file request operation + */ +async function handleDeleteFileRequest(args: any): Promise { + const validatedArgs = schemas.DeleteFileRequestSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.fileRequestsDelete({ + ids: validatedArgs.ids, + }); + + return { + content: [ + { + type: "text", + text: `File request(s) deleted successfully: ${validatedArgs.ids.join(', ')}`, + }, + ], + }; +} + +/** + * Handle update file request operation + */ +async function handleUpdateFileRequest(args: any): Promise { + const validatedArgs = schemas.UpdateFileRequestSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.fileRequestsUpdate({ + id: validatedArgs.id, + title: validatedArgs.title, + destination: validatedArgs.destination, + description: validatedArgs.description, + open: validatedArgs.open, + }); + + const request = response.result; + let statusMessage = `File request updated successfully:\n`; + statusMessage += `ID: ${request.id}\n`; + statusMessage += `Title: ${request.title}\n`; + statusMessage += `Destination: ${request.destination}\n`; + if (request.description) { + statusMessage += `Description: ${request.description}\n`; + } + statusMessage += `Status: ${request.is_open ? 'Open' : 'Closed'}\n`; + statusMessage += `File Count: ${request.file_count}`; + + return { + content: [ + { + type: "text", + text: statusMessage, + }, + ], + }; +} + +/** + * Handler for file request operations + */ +export async function handleFileRequestOperation(request: CallToolRequest): Promise { + const { name, arguments: args } = request.params; + + switch (name) { + case "dropbox_create_file_request": + return await handleCreateFileRequest(args); + + case "dropbox_get_file_request": + return await handleGetFileRequest(args); + + case "dropbox_list_file_requests": + return await handleListFileRequests(args); + + case "dropbox_delete_file_request": + return await handleDeleteFileRequest(args); + + case "dropbox_update_file_request": + return await handleUpdateFileRequest(args); + + default: + throw new Error(`Unknown file request operation: ${name}`); + } +} diff --git a/mcp_servers/dropbox/src/handlers/files.ts b/mcp_servers/dropbox/src/handlers/files.ts new file mode 100644 index 00000000..187cc47e --- /dev/null +++ b/mcp_servers/dropbox/src/handlers/files.ts @@ -0,0 +1,347 @@ +import { getDropboxClient } from '../utils/context.js'; +import { + ListFolderSchema, + ListFolderContinueSchema, + CreateFolderSchema, + DeleteFileSchema, + MoveFileSchema, + CopyFileSchema, + GetFileInfoSchema, + SearchFilesSchema, + SearchFilesContinueSchema +} from '../schemas/index.js'; +import { CallToolRequest, CallToolResult } from "@modelcontextprotocol/sdk/types.js"; + +/** + * Handle list folder operation + */ +async function handleListFolder(args: any): Promise { + const validatedArgs = ListFolderSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesListFolder({ + path: validatedArgs.path, + recursive: validatedArgs.recursive, + include_media_info: validatedArgs.include_media_info, + include_deleted: validatedArgs.include_deleted, + include_has_explicit_shared_members: validatedArgs.include_has_explicit_shared_members, + limit: validatedArgs.limit, + }); + + const entries = response.result.entries.map((entry: any) => { + if (entry['.tag'] === 'file') { + return `File: ${entry.name} (${entry.path_display}) - Size: ${entry.size} bytes, Modified: ${entry.server_modified}`; + } else if (entry['.tag'] === 'folder') { + return `Folder: ${entry.name} (${entry.path_display})`; + } else { + return `${entry['.tag']}: ${entry.name} (${entry.path_display})`; + } + }); + + let resultText = `Contents of folder "${validatedArgs.path || '/'}":\n\n${entries.join('\n') || 'Empty folder'}`; + + // Add pagination info if there are more results + if (response.result.has_more) { + resultText += `\n\nMore results available. Use 'list_folder_continue' with cursor: ${response.result.cursor}`; + } + + return { + content: [ + { + type: "text", + text: resultText, + }, + ], + }; +} + +/** + * Handle list folder continue operation + */ +async function handleListFolderContinue(args: any): Promise { + const validatedArgs = ListFolderContinueSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesListFolderContinue({ + cursor: validatedArgs.cursor, + }); + + const entries = response.result.entries.map((entry: any) => { + if (entry['.tag'] === 'file') { + return `File: ${entry.name} (${entry.path_display}) - Size: ${entry.size} bytes, Modified: ${entry.server_modified}`; + } else if (entry['.tag'] === 'folder') { + return `Folder: ${entry.name} (${entry.path_display})`; + } else { + return `${entry['.tag']}: ${entry.name} (${entry.path_display})`; + } + }); + + let resultText = `Continued folder contents:\n\n${entries.join('\n') || 'No more items'}`; + + // Add pagination info if there are more results + if (response.result.has_more) { + resultText += `\n\nMore results available. Use 'list_folder_continue' with cursor: ${response.result.cursor}`; + } else { + resultText += `\n\nEnd of folder contents reached.`; + } + + return { + content: [ + { + type: "text", + text: resultText, + }, + ], + }; +} + +/** + * Handle create folder operation + */ +async function handleCreateFolder(args: any): Promise { + const validatedArgs = CreateFolderSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesCreateFolderV2({ + path: validatedArgs.path, + autorename: validatedArgs.autorename, + }); + + return { + content: [ + { + type: "text", + text: `Folder created successfully: ${response.result.metadata.path_display}`, + }, + ], + }; +} + +/** + * Handle delete file operation + */ +async function handleDeleteFile(args: any): Promise { + const validatedArgs = DeleteFileSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesDeleteV2({ + path: validatedArgs.path, + }); + + return { + content: [ + { + type: "text", + text: `File/folder deleted successfully: ${response.result.metadata.path_display}`, + }, + ], + }; +} + +/** + * Handle move file operation + */ +async function handleMoveFile(args: any): Promise { + const validatedArgs = MoveFileSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesMoveV2({ + from_path: validatedArgs.from_path, + to_path: validatedArgs.to_path, + allow_shared_folder: validatedArgs.allow_shared_folder, + autorename: validatedArgs.autorename, + allow_ownership_transfer: validatedArgs.allow_ownership_transfer, + }); + + return { + content: [ + { + type: "text", + text: `File/folder moved from "${validatedArgs.from_path}" to "${response.result.metadata.path_display}"`, + }, + ], + }; +} + +/** + * Handle copy file operation + */ +async function handleCopyFile(args: any): Promise { + const validatedArgs = CopyFileSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesCopyV2({ + from_path: validatedArgs.from_path, + to_path: validatedArgs.to_path, + allow_shared_folder: validatedArgs.allow_shared_folder, + autorename: validatedArgs.autorename, + allow_ownership_transfer: validatedArgs.allow_ownership_transfer, + }); + + return { + content: [ + { + type: "text", + text: `File/folder copied from "${validatedArgs.from_path}" to "${response.result.metadata.path_display}"`, + }, + ], + }; +} + +/** + * Handle search files operation + */ +async function handleSearchFiles(args: any): Promise { + const validatedArgs = SearchFilesSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesSearchV2({ + query: validatedArgs.query, + options: { + path: validatedArgs.path, + max_results: validatedArgs.max_results, + file_status: validatedArgs.file_status as any, // Type assertion for compatibility + filename_only: validatedArgs.filename_only, + }, + }); + + const matches = response.result.matches?.map((match: any) => { + const metadata = match.metadata.metadata; + if (metadata['.tag'] === 'file') { + return `File: ${metadata.name} (${metadata.path_display}) - Size: ${metadata.size} bytes`; + } else if (metadata['.tag'] === 'folder') { + return `Folder: ${metadata.name} (${metadata.path_display})`; + } else { + return `${metadata['.tag']}: ${metadata.name} (${metadata.path_display})`; + } + }) || []; + + let resultText = `Search results for "${validatedArgs.query}"`; + if (validatedArgs.path) { + resultText += ` in "${validatedArgs.path}"`; + } + resultText += `:\n\n${matches.join('\n') || 'No results found'}`; + + // Add more results info + if (response.result.has_more) { + resultText += `\n\nMore results available. Showing first ${matches.length} results.`; + resultText += `\nUse 'search_files_continue' with cursor: ${response.result.cursor}`; + } + + return { + content: [ + { + type: "text", + text: resultText, + }, + ], + }; +} + +/** + * Handle search files continue operation + */ +async function handleSearchFilesContinue(args: any): Promise { + const validatedArgs = SearchFilesContinueSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesSearchContinueV2({ + cursor: validatedArgs.cursor, + }); + + const matches = response.result.matches?.map((match: any) => { + const metadata = match.metadata.metadata; + if (metadata['.tag'] === 'file') { + return `File: ${metadata.name} (${metadata.path_display}) - Size: ${metadata.size} bytes`; + } else if (metadata['.tag'] === 'folder') { + return `Folder: ${metadata.name} (${metadata.path_display})`; + } else { + return `${metadata['.tag']}: ${metadata.name} (${metadata.path_display})`; + } + }) || []; + + let resultText = `Search results (continued):\n\n${matches.join('\n') || 'No more results found'}`; + + // Add more results info + if (response.result.has_more) { + resultText += `\n\nMore results available. Use 'search_files_continue' with cursor: ${response.result.cursor}`; + } + + return { + content: [ + { + type: "text", + text: resultText, + }, + ], + }; +} + +/** + * Handle get file info operation + */ +async function handleGetFileInfo(args: any): Promise { + const validatedArgs = GetFileInfoSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.filesGetMetadata({ + path: validatedArgs.path, + include_media_info: validatedArgs.include_media_info, + include_deleted: validatedArgs.include_deleted, + include_has_explicit_shared_members: validatedArgs.include_has_explicit_shared_members, + }); + + const metadata = response.result; + let info = `Name: ${metadata.name}\nPath: ${metadata.path_display}`; + + if (metadata['.tag'] === 'file') { + info += `\nType: File\nSize: ${(metadata as any).size} bytes\nLast Modified: ${(metadata as any).server_modified}`; + if ((metadata as any).content_hash) { + info += `\nContent Hash: ${(metadata as any).content_hash}`; + } + } else if (metadata['.tag'] === 'folder') { + info += `\nType: Folder`; + if ((metadata as any).shared_folder_id) { + info += `\nShared Folder ID: ${(metadata as any).shared_folder_id}`; + } + } + + return { + content: [ + { + type: "text", + text: info, + }, + ], + }; +} + +/** + * Main handler for file management operations + */ +export async function handleFilesOperation(request: CallToolRequest): Promise { + const { name, arguments: args } = request.params; + + switch (name) { + case "dropbox_list_folder": + return await handleListFolder(args); + case "dropbox_list_folder_continue": + return await handleListFolderContinue(args); + case "dropbox_create_folder": + return await handleCreateFolder(args); + case "dropbox_delete_file": + return await handleDeleteFile(args); + case "dropbox_move_file": + return await handleMoveFile(args); + case "dropbox_copy_file": + return await handleCopyFile(args); + case "dropbox_search_files": + return await handleSearchFiles(args); + case "dropbox_search_files_continue": + return await handleSearchFilesContinue(args); + case "dropbox_get_file_info": + return await handleGetFileInfo(args); + default: + throw new Error(`Unknown files operation: ${name}`); + } +} diff --git a/mcp_servers/dropbox/src/handlers/index.ts b/mcp_servers/dropbox/src/handlers/index.ts new file mode 100644 index 00000000..637047f6 --- /dev/null +++ b/mcp_servers/dropbox/src/handlers/index.ts @@ -0,0 +1,6 @@ +export * from './files.js'; +export * from './file-operations.js'; +export * from './sharing.js'; +export * from './file-requests.js'; +export * from './batch-operations.js'; +export * from './account.js'; diff --git a/mcp_servers/dropbox/src/handlers/sharing.ts b/mcp_servers/dropbox/src/handlers/sharing.ts new file mode 100644 index 00000000..ad5d016b --- /dev/null +++ b/mcp_servers/dropbox/src/handlers/sharing.ts @@ -0,0 +1,525 @@ +import { CallToolRequest, CallToolResult } from "@modelcontextprotocol/sdk/types.js"; +import * as schemas from "../schemas/index.js"; +import { getDropboxClient } from "../utils/context.js"; + +/** + * Handle add file member operation + */ +async function handleAddFileMember(args: any): Promise { + const validatedArgs = schemas.AddFileMemberSchema.parse(args); + const dropbox = getDropboxClient(); + + // First, get the file ID if a path was provided + let fileId = validatedArgs.file; + + // If the file parameter doesn't start with "id:", treat it as a path and get the file ID + if (!fileId.startsWith('id:')) { + const fileInfo = await dropbox.filesGetMetadata({ + path: fileId, + }); + fileId = (fileInfo.result as any).id; + if (!fileId) { + return { + content: [ + { + type: "text", + text: `Failed to get file ID for path: "${validatedArgs.file}"\n\nThe file exists but no ID was returned. This may be due to file type or permission limitations.`, + }, + ], + }; + } + } + + const members = validatedArgs.members.map(member => ({ + ".tag": "email", + email: member.email + })); + + const response = await dropbox.sharingAddFileMember({ + file: fileId, + members: members as any, + access_level: { ".tag": validatedArgs.members[0].access_level }, + quiet: validatedArgs.quiet, + custom_message: validatedArgs.custom_message, + }); + + return { + content: [ + { + type: "text", + text: `Member(s) added to file successfully!\n\nFile: ${validatedArgs.file}\nMembers added: ${validatedArgs.members.map(m => `${m.email} (${m.access_level})`).join(', ')}\nFile ID: ${fileId}`, + }, + ], + }; +} + +/** + * Handle list file members operation + */ +async function handleListFileMembers(args: any): Promise { + const validatedArgs = schemas.ListFileMembersSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.sharingListFileMembers({ + file: validatedArgs.file, + include_inherited: validatedArgs.include_inherited, + limit: validatedArgs.limit, + }); + + const members = (response.result as any).users?.map((member: any) => + `${member.user?.email || 'N/A'} (${member.access_type?.['.tag'] || 'N/A'})` + ) || []; + + return { + content: [ + { + type: "text", + text: `Members of file "${validatedArgs.file}":\n\n${members.join('\n') || 'No members found'}`, + }, + ], + }; +} + +/** + * Handle remove file member operation + */ +async function handleRemoveFileMember(args: any): Promise { + const validatedArgs = schemas.RemoveFileMemberSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.sharingRemoveFileMember2({ + file: validatedArgs.file, + member: { ".tag": "email", email: validatedArgs.member } as any, + }); + + return { + content: [ + { + type: "text", + text: `Member removed from file: ${validatedArgs.file}`, + }, + ], + }; +} + +/** + * Handle share folder operation + */ +async function handleShareFolder(args: any): Promise { + const validatedArgs = schemas.ShareFolderSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.sharingShareFolder({ + path: validatedArgs.path, + member_policy: { ".tag": validatedArgs.member_policy } as any, + acl_update_policy: { ".tag": validatedArgs.acl_update_policy } as any, + shared_link_policy: { ".tag": validatedArgs.shared_link_policy } as any, + force_async: validatedArgs.force_async, + }); + + const result = response.result as any; + const sharedFolderId = result.shared_folder_id || result.async_job_id || 'Unknown'; + + return { + content: [ + { + type: "text", + text: `Folder shared successfully!\n\nFolder: ${validatedArgs.path}\nShared Folder ID: ${sharedFolderId}\nMember Policy: ${validatedArgs.member_policy}\nACL Update Policy: ${validatedArgs.acl_update_policy}\nShared Link Policy: ${validatedArgs.shared_link_policy}`, + }, + ], + }; +} + +/** + * Handle list folder members operation + */ +async function handleListFolderMembers(args: any): Promise { + const validatedArgs = schemas.ListFolderMembersSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.sharingListFolderMembers({ + shared_folder_id: validatedArgs.shared_folder_id, + limit: validatedArgs.limit, + }); + + const members = (response.result as any).users?.map((member: any) => + `${member.user?.email || 'N/A'} (${member.access_type?.['.tag'] || 'N/A'})` + ) || []; + + return { + content: [ + { + type: "text", + text: `Members of shared folder "${validatedArgs.shared_folder_id}":\n\n${members.join('\n') || 'No members found'}`, + }, + ], + }; +} + +/** + * Handle add folder member operation + */ +async function handleAddFolderMember(args: any): Promise { + const validatedArgs = schemas.AddFolderMemberSchema.parse(args); + const dropbox = getDropboxClient(); + + const members = validatedArgs.members.map(member => ({ + member: { ".tag": "email", email: member.email }, + access_level: { ".tag": member.access_level } + })); + + const response = await dropbox.sharingAddFolderMember({ + shared_folder_id: validatedArgs.shared_folder_id, + members: members as any, + quiet: validatedArgs.quiet, + custom_message: validatedArgs.custom_message, + }); + + return { + content: [ + { + type: "text", + text: `Member(s) added to shared folder: ${validatedArgs.shared_folder_id}`, + }, + ], + }; +} + +/** + * Handle remove folder member operation + */ +async function handleRemoveFolderMember(args: any): Promise { + const validatedArgs = schemas.RemoveFolderMemberSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.sharingRemoveFolderMember({ + shared_folder_id: validatedArgs.shared_folder_id, + member: { ".tag": "email", email: validatedArgs.member }, + leave_a_copy: validatedArgs.leave_a_copy, + }); + + const result = response.result as any; + let resultText = `Remove folder member operation initiated for shared folder ID: ${validatedArgs.shared_folder_id}\n\n`; + + if (result['.tag'] === 'async_job_id') { + resultText += `Operation is being processed asynchronously.\n\nJob ID: ${result.async_job_id}\n\nUse "check_job_status" with this ID to monitor the progress.`; + } else { + resultText += `Unexpected result: ${JSON.stringify(result)}`; + } + + return { + content: [ + { + type: "text", + text: resultText, + }, + ], + }; +} + +/** + * Handle share file operation + */ +async function handleShareFile(args: any): Promise { + const validatedArgs = schemas.ShareFileSchema.parse(args); + const dropbox = getDropboxClient(); + + const shareSettings: any = {}; + if (validatedArgs.settings) { + if (validatedArgs.settings.requested_visibility) { + shareSettings.requested_visibility = { ".tag": validatedArgs.settings.requested_visibility }; + } + if (validatedArgs.settings.link_password) { + shareSettings.link_password = validatedArgs.settings.link_password; + } + if (validatedArgs.settings.expires) { + shareSettings.expires = validatedArgs.settings.expires; + } + } + + const response = await dropbox.sharingCreateSharedLinkWithSettings({ + path: validatedArgs.path, + settings: Object.keys(shareSettings).length > 0 ? shareSettings : undefined, + }); + + const result = response.result; + return { + content: [ + { + type: "text", + text: `File shared successfully!\n\nFile: ${(result as any).name}\nShared Link: ${(result as any).url}\nPath: ${(result as any).path_display}\nVisibility: ${(result as any).link_permissions?.resolved_visibility?.['.tag'] || 'Unknown'}`, + }, + ], + }; +} + +/** + * Handle get shared links operation + */ +async function handleGetSharedLinks(args: any): Promise { + const validatedArgs = schemas.GetSharedLinksSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.sharingListSharedLinks({ + path: validatedArgs.path, + cursor: validatedArgs.cursor, + }); + + const links = (response.result as any).links || []; + let resultText = `Shared Links${validatedArgs.path ? ` for "${validatedArgs.path}"` : ''}: ${links.length} link(s) found\n\n`; + + if (links.length === 0) { + resultText += 'No shared links found.'; + } else { + resultText += links.map((link: any, index: number) => { + return `${index + 1}. ${link.name}\n URL: ${link.url}\n Path: ${link.path_display}\n Visibility: ${link.link_permissions?.resolved_visibility?.['.tag'] || 'Unknown'}\n Expires: ${link.expires || 'Never'}`; + }).join('\n\n'); + } + + return { + content: [ + { + type: "text", + text: resultText, + }, + ], + }; +} + +/** + * Handle unshare file operation + */ +async function handleUnshareFile(args: any): Promise { + const validatedArgs = schemas.UnshareFileSchema.parse(args); + const dropbox = getDropboxClient(); + + await dropbox.sharingUnshareFile({ + file: validatedArgs.file, + }); + + return { + content: [ + { + type: "text", + text: `Successfully unshared file: ${validatedArgs.file}\n\nAll members have been removed from this file (inherited members are not affected).`, + }, + ], + }; +} + +/** + * Handle unshare folder operation + */ +async function handleUnshareFolder(args: any): Promise { + const validatedArgs = schemas.UnshareFolderSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.sharingUnshareFolder({ + shared_folder_id: validatedArgs.shared_folder_id, + leave_a_copy: validatedArgs.leave_a_copy, + }); + + const result = response.result as any; + let resultText = `Unshare folder operation initiated for shared folder ID: ${validatedArgs.shared_folder_id}\n\n`; + + if (result['.tag'] === 'complete') { + resultText += `Operation completed successfully. The folder has been unshared.`; + if (validatedArgs.leave_a_copy) { + resultText += `\nMembers will keep a copy of the folder in their Dropbox.`; + } else { + resultText += `\nThe folder has been removed from members' Dropbox accounts.`; + } + } else if (result['.tag'] === 'async_job_id') { + resultText += `Operation is being processed asynchronously.\n\nJob ID: ${result.async_job_id}\n\nUse "check_job_status" with this ID to monitor the progress.`; + } + + return { + content: [ + { + type: "text", + text: resultText, + }, + ], + }; +} + +/** + * Handle list shared folders operation + */ +async function handleListSharedFolders(args: any): Promise { + const validatedArgs = schemas.ListSharedFoldersSchema.parse(args); + const dropbox = getDropboxClient(); + + const requestArgs: any = { + limit: validatedArgs.limit, + }; + + if (validatedArgs.cursor) { + requestArgs.cursor = validatedArgs.cursor; + } + + const response = await dropbox.sharingListFolders(requestArgs); + + const result = response.result as any; + let resultText = `Shared Folders:\n\n`; + + if (result.entries && result.entries.length > 0) { + result.entries.forEach((folder: any, index: number) => { + resultText += `${index + 1}. **${folder.name}**\n`; + resultText += ` ID: ${folder.shared_folder_id}\n`; + resultText += ` Path: ${folder.path_lower || 'N/A'}\n`; + resultText += ` Access Type: ${folder.access_type?.['.tag'] || 'Unknown'}\n`; + if (folder.is_team_folder) { + resultText += ` Team Folder: Yes\n`; + } + if (folder.policy) { + resultText += ` Policy: ${folder.policy.acl_update_policy?.['.tag'] || 'N/A'}\n`; + } + resultText += `\n`; + }); + } else { + resultText += `No shared folders found.\n`; + } + + if (result.has_more) { + resultText += `\nMore results available. Use 'list_shared_folders' with cursor: ${result.cursor}`; + } + + return { + content: [ + { + type: "text", + text: resultText, + }, + ], + }; +} + +/** + * Handle list received files operation + */ +async function handleListReceivedFiles(args: any): Promise { + const validatedArgs = schemas.ListReceivedFilesSchema.parse(args); + const dropbox = getDropboxClient(); + + const requestArgs: any = { + limit: validatedArgs.limit, + }; + + if (validatedArgs.cursor) { + requestArgs.cursor = validatedArgs.cursor; + } + + const response = await dropbox.sharingListReceivedFiles(requestArgs); + const result = response.result as any; + let resultText = `Files Shared With You:\n\n`; + + if (result.entries && result.entries.length > 0) { + result.entries.forEach((file: any, index: number) => { + resultText += `${index + 1}. **${file.name}**\n`; + resultText += ` ID: ${file.id}\n`; + resultText += ` Path: ${file.path_display || file.path_lower || 'N/A'}\n`; + resultText += ` Shared by: ${file.owner_display_names?.[0] || 'Unknown'}\n`; + resultText += ` Access Level: ${file.access_type?.['.tag'] || 'Unknown'}\n`; + if (file.time_invited) { + resultText += ` Invited: ${new Date(file.time_invited).toLocaleString()}\n`; + } + if (file.preview_url) { + resultText += ` Preview: Available\n`; + } + resultText += `\n`; + }); + } else { + resultText += `No files have been shared with you.\n`; + } + + if (result.has_more) { + resultText += `\nMore results available. Use 'list_received_files' with cursor: ${result.cursor}`; + } + + return { + content: [ + { + type: "text", + text: resultText, + }, + ], + }; +} + +/** + * Handle check job status operation + */ +async function handleCheckJobStatus(args: any): Promise { + const validatedArgs = schemas.CheckJobStatusSchema.parse(args); + const dropbox = getDropboxClient(); + + const response = await dropbox.sharingCheckJobStatus({ + async_job_id: validatedArgs.async_job_id, + }); + + const result = response.result as any; + let resultText = `Job Status for ID: ${validatedArgs.async_job_id}\n\n`; + + if (result['.tag'] === 'in_progress') { + resultText += `Status: In Progress\n\nThe operation is still being processed. Please check again in a moment.`; + } else if (result['.tag'] === 'complete') { + resultText += `Status: Complete\n\nThe operation has finished successfully.`; + } else if (result['.tag'] === 'failed') { + resultText += `Status: Failed\n\nThe operation has failed. Please check the operation parameters and try again.`; + if (result.failed) { + resultText += `\n\nFailure Details: ${JSON.stringify(result.failed, null, 2)}`; + } + } else { + resultText += `Status: ${result['.tag'] || 'Unknown'}\n\nUnexpected status returned.`; + } + + return { + content: [ + { + type: "text", + text: resultText, + }, + ], + }; +} + +/** + * Handler for sharing-related operations + */ +export async function handleSharingOperation(request: CallToolRequest): Promise { + const { name, arguments: args } = request.params; + + switch (name) { + case "dropbox_add_file_member": + return await handleAddFileMember(args); + case "dropbox_list_file_members": + return await handleListFileMembers(args); + case "dropbox_remove_file_member": + return await handleRemoveFileMember(args); + case "dropbox_share_folder": + return await handleShareFolder(args); + case "dropbox_list_folder_members": + return await handleListFolderMembers(args); + case "dropbox_add_folder_member": + return await handleAddFolderMember(args); + case "dropbox_remove_folder_member": + return await handleRemoveFolderMember(args); + case "dropbox_share_file": + return await handleShareFile(args); + case "dropbox_get_shared_links": + return await handleGetSharedLinks(args); + case "dropbox_unshare_file": + return await handleUnshareFile(args); + case "dropbox_unshare_folder": + return await handleUnshareFolder(args); + case "dropbox_list_shared_folders": + return await handleListSharedFolders(args); + case "dropbox_list_received_files": + return await handleListReceivedFiles(args); + case "dropbox_check_job_status": + return await handleCheckJobStatus(args); + default: + throw new Error(`Unknown sharing operation: ${name}`); + } +} diff --git a/mcp_servers/dropbox/src/index.ts b/mcp_servers/dropbox/src/index.ts new file mode 100644 index 00000000..a716b319 --- /dev/null +++ b/mcp_servers/dropbox/src/index.ts @@ -0,0 +1,297 @@ +#!/usr/bin/env node + +import express, { Request, Response } from 'express'; +import { Server } from "@modelcontextprotocol/sdk/server/index.js"; +import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js"; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; +import { + CallToolRequestSchema, + ListToolsRequestSchema, + ListResourceTemplatesRequestSchema, + ReadResourceRequestSchema, +} from "@modelcontextprotocol/sdk/types.js"; +import { Dropbox } from 'dropbox'; +import dotenv from 'dotenv'; + +dotenv.config(); + +// Import utilities +import { patchFetchResponse } from './utils/fetch-polyfill.js'; +import { formatDropboxError, addCommonErrorGuidance } from './utils/error-handling.js'; +import { asyncLocalStorage } from './utils/context.js'; + +// Import tool definitions +import { toolDefinitions } from './tools.js'; + +// Import handlers +import { + handleFilesOperation, + handleFileOperation, + handleSharingOperation, + handleFileRequestOperation, + handleBatchOperation, + handleAccountOperation, + handleReadResource +} from './handlers/index.js'; + +// Apply the fetch polyfill immediately +patchFetchResponse(); + +function extractAccessToken(req: Request): string { + let authData = process.env.AUTH_DATA; + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: Dropbox access token is missing. Provide it via AUTH_DATA env var or x-auth-data header with access_token field.'); + return ''; + } + + const authDataJson = JSON.parse(authData); + return authDataJson.access_token ?? ''; +} + +/** + * Create Dropbox client with access token + * TODO: Implement OAuth flow instead of using direct access tokens + * Reference: https://github.com/dropbox/dropbox-sdk-js/tree/main?tab=readme-ov-file#examples + * Current implementation expects pre-generated access tokens + */ +function createDropboxClient(accessToken: string): Dropbox { + return new Dropbox({ + fetch: fetch, + accessToken + }); +} + +// Get Dropbox MCP Server +const getDropboxMcpServer = () => { + // Server implementation + const server = new Server({ + name: "dropbox", + version: "1.0.0", + }, { + capabilities: { + tools: {}, + resources: {}, + }, + }); + + // Tool handlers + server.setRequestHandler(ListToolsRequestSchema, async () => ({ + tools: toolDefinitions, + })); + + // Resource handlers + server.setRequestHandler(ListResourceTemplatesRequestSchema, async () => ({ + resourceTemplates: [ + { + uriTemplate: 'dropbox://{path}', + name: 'Dropbox File', + description: 'Access files from Dropbox storage', + }, + ], + })); + + server.setRequestHandler(ReadResourceRequestSchema, async (request) => { + const uri = request.params.uri; + return await handleReadResource(uri); + }); + + server.setRequestHandler(CallToolRequestSchema, async (request) => { + const { name, arguments: args } = request.params; + + try { + // Determine which handler to use based on the tool name + if (['dropbox_list_folder', 'dropbox_list_folder_continue', 'dropbox_create_folder', 'dropbox_delete_file', 'dropbox_move_file', 'dropbox_copy_file', 'dropbox_search_files', 'dropbox_search_files_continue', 'dropbox_get_file_info'].includes(name)) { + return await handleFilesOperation(request); + } + + if (['dropbox_upload_file', 'dropbox_download_file', 'dropbox_get_thumbnail', 'dropbox_list_revisions', 'dropbox_restore_file', 'dropbox_get_temporary_link', 'dropbox_save_url', 'dropbox_save_url_check_job_status'].includes(name)) { + return await handleFileOperation(request); + } + + if (['dropbox_add_file_member', 'dropbox_list_file_members', 'dropbox_remove_file_member', 'dropbox_share_folder', 'dropbox_list_folder_members', 'dropbox_add_folder_member', 'dropbox_remove_folder_member', 'dropbox_list_shared_folders', 'dropbox_list_shared_folders_continue', 'dropbox_list_received_files', 'dropbox_check_job_status', 'dropbox_unshare_file', 'dropbox_unshare_folder', 'dropbox_share_file', 'dropbox_get_shared_links'].includes(name)) { + return await handleSharingOperation(request); + } + + if (['dropbox_create_file_request', 'dropbox_get_file_request', 'dropbox_list_file_requests', 'dropbox_delete_file_request', 'dropbox_update_file_request'].includes(name)) { + return await handleFileRequestOperation(request); + } + + if (['dropbox_batch_delete', 'dropbox_batch_move', 'dropbox_batch_copy', 'dropbox_check_batch_job_status', 'dropbox_lock_file_batch', 'dropbox_unlock_file_batch'].includes(name)) { + return await handleBatchOperation(request); + } + + if (['dropbox_get_current_account', 'dropbox_get_space_usage'].includes(name)) { + return await handleAccountOperation(request); + } + + // If no handler matches, return error + return { + content: [ + { + type: "text", + text: `Unknown tool: ${name}. This tool has not been implemented yet.`, + }, + ], + }; + } catch (error: any) { + console.error(`Error executing tool ${name}:`, error); + const errorMessage = addCommonErrorGuidance( + formatDropboxError(error, name, "request"), + error + ); + + return { + content: [ + { + type: "text", + text: errorMessage, + }, + ], + }; + } + }); + + return server; +}; + +// Export the server factory for use +export { getDropboxMcpServer }; + +// If this file is run directly, start the HTTP+SSE server +if (import.meta.url === `file://${process.argv[1]}`) { + const app = express(); + app.use(express.json()); + + //============================================================================= + // STREAMABLE HTTP TRANSPORT (PROTOCOL VERSION 2025-03-26) + //============================================================================= + app.post('/mcp', (req: Request, res: Response) => { + handleMcpRequest(req, res); + }); + + async function handleMcpRequest(req: Request, res: Response) { + const accessToken = extractAccessToken(req); + + // Initialize Dropbox client only if access token is available + const dropboxClient = accessToken ? createDropboxClient(accessToken as string) : null; + + const server = getDropboxMcpServer(); + try { + const transport: StreamableHTTPServerTransport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + await server.connect(transport); + asyncLocalStorage.run({ dropboxClient }, async () => { + await transport.handleRequest(req, res, req.body); + }); + res.on('close', () => { + transport.close(); + server.close(); + }); + } catch (error) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', + }, + id: null, + }); + } + } + } + + app.get('/mcp', async (req: Request, res: Response) => { + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); + }); + + app.delete('/mcp', async (req: Request, res: Response) => { + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); + }); + + //============================================================================= + // DEPRECATED HTTP+SSE TRANSPORT (PROTOCOL VERSION 2024-11-05) + //============================================================================= + + // Map to store SSE transports + const transports = new Map(); + + app.get("/sse", (req: Request, res: Response) => { + handleSseRequest(req, res); + }); + + async function handleSseRequest(req: Request, res: Response) { + const accessToken = extractAccessToken(req); + + const transport = new SSEServerTransport(`/messages`, res); + + // Set up cleanup when connection closes + res.on('close', async () => { + try { + transports.delete(transport.sessionId); + } finally { + } + }); + + transports.set(transport.sessionId, transport); + + const server = getDropboxMcpServer(); + await server.connect(transport); + + console.log(`SSE connection established with transport: ${transport.sessionId}`); + } + + app.post("/messages", (req: Request, res: Response) => { + handleMessagesRequest(req, res); + }); + + async function handleMessagesRequest(req: Request, res: Response) { + const sessionId = req.query.sessionId as string; + const accessToken = extractAccessToken(req); + + let transport: SSEServerTransport | undefined; + transport = sessionId ? transports.get(sessionId) : undefined; + if (transport) { + // Initialize Dropbox client only if access token is available + const dropboxClient = accessToken ? createDropboxClient(accessToken as string) : null; + + asyncLocalStorage.run({ dropboxClient }, async () => { + await transport!.handlePostMessage(req, res); + }); + } else { + console.error(`Transport not found for session ID: ${sessionId}`); + res.status(404).send({ error: "Transport not found" }); + } + } + + // Start the server + const PORT = process.env.PORT || 5000; + app.listen(PORT, () => { + console.log(`Dropbox MCP server running on port ${PORT}`); + }); +} diff --git a/mcp_servers/dropbox/src/schemas/account.ts b/mcp_servers/dropbox/src/schemas/account.ts new file mode 100644 index 00000000..a6442073 --- /dev/null +++ b/mcp_servers/dropbox/src/schemas/account.ts @@ -0,0 +1,5 @@ +import { z } from "zod"; + +export const GetCurrentAccountSchema = z.object({}); + +export const GetSpaceUsageSchema = z.object({}); diff --git a/mcp_servers/dropbox/src/schemas/batch-operations.ts b/mcp_servers/dropbox/src/schemas/batch-operations.ts new file mode 100644 index 00000000..fbe505bc --- /dev/null +++ b/mcp_servers/dropbox/src/schemas/batch-operations.ts @@ -0,0 +1,43 @@ +import { z } from "zod"; + +// Batch Operations Schemas +export const BatchDeleteSchema = z.object({ + entries: z.array(z.object({ + path: z.string().describe("Path of the file or folder to delete"), + })).describe("List of files/folders to delete (up to 1000 entries)"), +}); + +export const BatchMoveSchema = z.object({ + entries: z.array(z.object({ + from_path: z.string().describe("Current path of the file or folder"), + to_path: z.string().describe("New path for the file or folder"), + })).describe("List of move operations to perform (up to 1000 entries)"), + autorename: z.boolean().optional().default(false).describe("Automatically rename if destination already exists"), + allow_ownership_transfer: z.boolean().optional().default(false).describe("Allow ownership transfer"), +}); + +export const BatchCopySchema = z.object({ + entries: z.array(z.object({ + from_path: z.string().describe("Path of the file or folder to copy"), + to_path: z.string().describe("Destination path for the copy"), + })).describe("List of copy operations to perform (up to 1000 entries)"), + autorename: z.boolean().optional().default(false).describe("Automatically rename if destination already exists"), +}); + +// Batch Job Status Check Schema +export const BatchJobStatusSchema = z.object({ + async_job_id: z.string().describe("The async job ID returned from a batch operation"), +}); + +// File Locking Schemas +export const LockFileBatchSchema = z.object({ + entries: z.array(z.object({ + path: z.string().describe("Path of the file to lock"), + })).describe("List of files to lock (up to 1000 entries)"), +}); + +export const UnlockFileBatchSchema = z.object({ + entries: z.array(z.object({ + path: z.string().describe("Path of the file to unlock"), + })).describe("List of files to unlock (up to 1000 entries)"), +}); diff --git a/mcp_servers/dropbox/src/schemas/file-operations.ts b/mcp_servers/dropbox/src/schemas/file-operations.ts new file mode 100644 index 00000000..becb5ab5 --- /dev/null +++ b/mcp_servers/dropbox/src/schemas/file-operations.ts @@ -0,0 +1,61 @@ +import { z } from "zod"; + +export const UploadFileSchema = z.object({ + dropbox_path: z.string().describe("Path where the file should be uploaded in Dropbox (e.g., '/folder/filename.txt')"), + source_uri: z.string().describe( + "URI of the resource to upload. Supported protocols:\n" + + "- data (Data URIs, e.g., data:text/plain;base64,SGVsbG8sIFdvcmxkIQ%3D%3D)\n" + + "- ftp (FTP URIs, e.g., ftp://ftp.kernel.org/pub/site/README)\n" + + "- http (HTTP URIs, e.g., http://www.example.com/path/to/name)\n" + + "- https (HTTPS URIs, e.g., https://www.example.com/path/to/name)" + ), + mode: z.enum(['add', 'overwrite', 'update']).optional().default('add').describe("Upload mode"), + autorename: z.boolean().optional().default(false).describe("Automatically rename file if it already exists"), + mute: z.boolean().optional().default(false).describe("Suppress notifications"), +}) +.refine( + (data) => /^(data:||ftp:\/\/|https?:\/\/)/.test(data.source_uri), + { + message: "source_uri must start with one of: data:, ftp://, http://, https://", + path: ["source_uri"], + } +); + +export const DownloadFileSchema = z.object({ + path: z.string().describe("Path of the file to download"), +}); + +export const GetThumbnailSchema = z.object({ + path: z.string().describe("Path of the file to get thumbnail for"), + format: z.enum(["jpeg", "png"]).optional().default("jpeg").describe("Image format for the thumbnail"), + size: z.enum(["w32h32", "w64h64", "w128h128", "w256h256", "w480h320", "w640h480", "w960h640", "w1024h768", "w2048h1536"]).optional().default("w256h256").describe("Size of the thumbnail"), +}); + +export const GetPreviewSchema = z.object({ + path: z.string().describe("Path of the file to get preview for"), +}); + +export const GetTemporaryLinkSchema = z.object({ + path: z.string().describe("Path of the file to get temporary link for"), +}); + +export const ListRevisionsSchema = z.object({ + path: z.string().describe("Path of the file to get revisions for"), + mode: z.enum(['path', 'id']).optional().default('path').describe("How to interpret the path"), + limit: z.number().optional().default(10).describe("Maximum number of revisions to return"), +}); + +export const RestoreFileSchema = z.object({ + path: z.string().describe("Path of the file to restore"), + rev: z.string().describe("Revision ID to restore to"), +}); + +// Save URL Schemas +export const SaveUrlSchema = z.object({ + path: z.string().describe("Path where the file should be saved (e.g., '/folder/filename.ext')"), + url: z.string().describe("URL to download and save to Dropbox"), +}); + +export const SaveUrlCheckJobStatusSchema = z.object({ + async_job_id: z.string().describe("The async job ID returned from save_url operation"), +}); diff --git a/mcp_servers/dropbox/src/schemas/file-requests.ts b/mcp_servers/dropbox/src/schemas/file-requests.ts new file mode 100644 index 00000000..22c0923c --- /dev/null +++ b/mcp_servers/dropbox/src/schemas/file-requests.ts @@ -0,0 +1,26 @@ +import { z } from "zod"; + +// File Requests Schemas +export const CreateFileRequestSchema = z.object({ + title: z.string().describe("The title of the file request"), + destination: z.string().describe("The path of the folder where uploaded files will be sent"), + description: z.string().optional().describe("Description of the file request"), +}); + +export const GetFileRequestSchema = z.object({ + id: z.string().describe("The ID of the file request"), +}); + +export const ListFileRequestsSchema = z.object({}); + +export const DeleteFileRequestSchema = z.object({ + ids: z.array(z.string()).describe("List of file request IDs to delete"), +}); + +export const UpdateFileRequestSchema = z.object({ + id: z.string().describe("The ID of the file request to update"), + title: z.string().optional().describe("New title for the file request"), + destination: z.string().optional().describe("New destination path for the file request"), + description: z.string().optional().describe("New description for the file request"), + open: z.boolean().optional().describe("Whether to open (true) or close (false) the file request"), +}); diff --git a/mcp_servers/dropbox/src/schemas/files.ts b/mcp_servers/dropbox/src/schemas/files.ts new file mode 100644 index 00000000..7b9d1594 --- /dev/null +++ b/mcp_servers/dropbox/src/schemas/files.ts @@ -0,0 +1,61 @@ +import { z } from "zod"; + +// Basic file and folder operations +export const ListFolderSchema = z.object({ + path: z.string().optional().default("").describe("A unique identifier for the file. Can be a path (/(.|[\\r\\n])*)?), file ID (id:.*), or namespace ID (ns:[0-9]+(/.*)?). Empty string for root folder."), + recursive: z.boolean().optional().default(false).describe("If true, the list folder operation will be applied recursively to all subfolders and the response will contain contents of all subfolders. Note: In some cases, setting recursive to true may lead to performance issues or errors, especially when traversing folder structures with a large number of items. A workaround is to set recursive to false and traverse subfolders one at a time."), + include_media_info: z.boolean().optional().default(false).describe("DEPRECATED: Field is deprecated. If true, FileMetadata.media_info is set for photo and video. This parameter will no longer have an effect starting December 2, 2019."), + include_deleted: z.boolean().optional().default(false).describe("If true, the results will include entries for files and folders that used to exist but were deleted."), + include_has_explicit_shared_members: z.boolean().optional().default(false).describe("If true, the results will include a flag for each file indicating whether or not that file has any explicit members."), + include_mounted_folders: z.boolean().optional().default(true).describe("If true, the results will include entries under mounted folders which includes app folder, shared folder and team folder."), + limit: z.number().min(1).max(2000).optional().describe("The maximum number of results to return per request. Note: This is an approximate number and there can be slightly more entries returned in some cases."), + include_non_downloadable_files: z.boolean().optional().default(true).describe("If true, include files that are not downloadable, i.e. Google Docs."), +}); + +export const ListFolderContinueSchema = z.object({ + cursor: z.string().describe("\"Cursor\" from previous list_folder/_continue operation to continue listing"), +}); + +export const CreateFolderSchema = z.object({ + path: z.string().describe("Path of the folder to create"), + autorename: z.boolean().optional().default(false).describe("Automatically rename folder if it already exists"), +}); + +export const DeleteFileSchema = z.object({ + path: z.string().describe("Path of the file or folder to delete"), +}); + +export const MoveFileSchema = z.object({ + from_path: z.string().describe("Current path of the file or folder"), + to_path: z.string().describe("New path for the file or folder"), + allow_shared_folder: z.boolean().optional().default(false).describe("Allow moving shared folders"), + autorename: z.boolean().optional().default(false).describe("Automatically rename if destination already exists"), + allow_ownership_transfer: z.boolean().optional().default(false).describe("Allow ownership transfer"), +}); + +export const CopyFileSchema = z.object({ + from_path: z.string().describe("Path of the file or folder to copy"), + to_path: z.string().describe("Destination path for the copy"), + allow_shared_folder: z.boolean().optional().default(false).describe("Allow copying shared folders"), + autorename: z.boolean().optional().default(false).describe("Automatically rename if destination already exists"), + allow_ownership_transfer: z.boolean().optional().default(false).describe("Allow ownership transfer"), +}); + +export const GetFileInfoSchema = z.object({ + path: z.string().describe("Path of the file to get information about"), + include_media_info: z.boolean().optional().default(false).describe("Include media info for photos and videos"), + include_deleted: z.boolean().optional().default(false).describe("Include deleted files"), + include_has_explicit_shared_members: z.boolean().optional().default(false).describe("Include shared member info"), +}); + +export const SearchFilesSchema = z.object({ + query: z.string().describe("Search query for finding files"), + path: z.string().optional().default("").describe("Path to search within (empty for entire Dropbox)"), + max_results: z.number().optional().default(100).describe("Maximum number of search results"), + file_status: z.enum(['active', 'deleted']).optional().default('active').describe("File status to search for"), + filename_only: z.boolean().optional().default(false).describe("Search only in filenames"), +}); + +export const SearchFilesContinueSchema = z.object({ + cursor: z.string().min(1).describe("Cursor from previous search files operation to continue listing"), +}); diff --git a/mcp_servers/dropbox/src/schemas/index.ts b/mcp_servers/dropbox/src/schemas/index.ts new file mode 100644 index 00000000..117a297c --- /dev/null +++ b/mcp_servers/dropbox/src/schemas/index.ts @@ -0,0 +1,7 @@ +// Re-export all schemas from different modules +export * from './account.js'; +export * from './batch-operations.js'; +export * from './file-operations.js'; +export * from './file-requests.js'; +export * from './files.js'; +export * from './sharing.js'; diff --git a/mcp_servers/dropbox/src/schemas/sharing.ts b/mcp_servers/dropbox/src/schemas/sharing.ts new file mode 100644 index 00000000..71c5755d --- /dev/null +++ b/mcp_servers/dropbox/src/schemas/sharing.ts @@ -0,0 +1,89 @@ +import { z } from "zod"; + +export const ShareFileSchema = z.object({ + path: z.string().describe("Path of the file or folder to share"), + settings: z.object({ + requested_visibility: z.enum(['public', 'team_only', 'password']).optional().describe("Link visibility. 'public' works for all accounts. 'team_only' and 'password' require team membership or paid accounts (Plus/Professional). If you're unsure about your account type, use 'get_current_account' first to check - look for account_type.tag and team fields"), + link_password: z.string().optional().describe("Password for password-protected links. PAID FEATURE: Requires Dropbox Plus/Professional or team account. Will cause 'settings_error/not_authorized' on basic/free accounts. Check account type with 'get_current_account' before using"), + expires: z.string().optional().describe("Expiration date (ISO 8601 format). PAID FEATURE: Requires Dropbox Plus/Professional or team account. Will cause 'settings_error/not_authorized' on basic/free accounts. Check account type with 'get_current_account' before using"), + }).optional().describe("Share settings. For basic/free accounts: leave empty or use only 'requested_visibility': 'public'. Advanced settings require paid accounts - use 'get_current_account' to verify capabilities before setting password/expiration"), +}); + +export const GetSharedLinksSchema = z.object({ + path: z.string().optional().describe("Path to get shared links for (omit for all links)"), + cursor: z.string().optional().describe("Cursor for pagination"), +}); + +export const AddFileMemberSchema = z.object({ + file: z.string().describe("File ID (format: 'id:...') or path of the file to add member to. Note: Dropbox API requires file ID, but path will be automatically converted."), + members: z.array(z.object({ + email: z.string().describe("Email address of the member"), + access_level: z.enum(['viewer', 'editor']).optional().default('viewer').describe("Access level for the member"), + })).describe("List of members to add"), + quiet: z.boolean().optional().default(false).describe("Whether to suppress notifications"), + custom_message: z.string().optional().describe("Custom message to include in the invitation"), +}); + +export const ListFileMembersSchema = z.object({ + file: z.string().describe("Path of the file to list members for"), + include_inherited: z.boolean().optional().default(true).describe("Include inherited permissions"), + limit: z.number().optional().default(100).describe("Maximum number of members to return"), +}); + +export const RemoveFileMemberSchema = z.object({ + file: z.string().describe("Path of the file to remove member from"), + member: z.string().describe("Email address of the member to remove"), +}); + +export const ShareFolderSchema = z.object({ + path: z.string().describe("Path of the folder to share"), + member_policy: z.enum(['team', 'anyone']).optional().default('anyone').describe("Who can be a member of this shared folder. Only applicable if the current user is on a team"), + acl_update_policy: z.enum(['owner', 'editors']).optional().default('owner').describe("Who can add and remove members"), + shared_link_policy: z.enum(['anyone', 'members']).optional().default('anyone').describe("The policy to apply to shared links created for content inside this shared folder. The current user must be on a team to set this policy to 'members'"), + force_async: z.boolean().optional().default(false).describe("Whether to force the share to happen asynchronously"), + access_inheritance: z.enum(['inherit', 'no_inherit']).optional().default('inherit').describe("The access inheritance settings for the folder"), +}); + +export const ListFolderMembersSchema = z.object({ + shared_folder_id: z.string().describe("ID of the shared folder"), + limit: z.number().optional().default(100).describe("Maximum number of members to return"), +}); + +export const AddFolderMemberSchema = z.object({ + shared_folder_id: z.string().describe("ID of the shared folder"), + members: z.array(z.object({ + email: z.string().describe("Email address of the member"), + access_level: z.enum(['viewer', 'editor', 'owner']).optional().default('viewer').describe("Access level for the member"), + })).describe("List of members to add"), + quiet: z.boolean().optional().default(false).describe("Whether to suppress notifications"), + custom_message: z.string().optional().describe("Custom message to include in the invitation"), +}); + +export const UnshareFileSchema = z.object({ + file: z.string().min(1).describe("The file to unshare. Can be a file ID (format: 'id:...') or path."), +}); + +export const UnshareFolderSchema = z.object({ + shared_folder_id: z.string().describe("The ID for the shared folder"), + leave_a_copy: z.boolean().optional().default(false).describe("If true, members of this shared folder will get a copy of this folder after it's unshared. Otherwise, it will be removed from their Dropbox."), +}); + +export const ListSharedFoldersSchema = z.object({ + limit: z.number().optional().default(100).describe("Maximum number of shared folders to return"), + cursor: z.string().optional().describe("Cursor from previous list_shared_folders call to continue listing - use this to get the next page of results"), +}); + +export const ListReceivedFilesSchema = z.object({ + limit: z.number().optional().default(100).describe("Maximum number of received files to return"), + cursor: z.string().optional().describe("Cursor from previous list_received_files call to continue listing - use this to get the next page of results"), +}); + +export const CheckJobStatusSchema = z.object({ + async_job_id: z.string().min(1).describe("The async job ID returned from a sharing operation"), +}); + +export const RemoveFolderMemberSchema = z.object({ + shared_folder_id: z.string().describe("The ID for the shared folder."), + member: z.string().describe("Email address of the member to remove"), + leave_a_copy: z.boolean().describe("If true, the removed user will keep their copy of the folder after it's unshared."), +}); diff --git a/mcp_servers/dropbox/src/tools.ts b/mcp_servers/dropbox/src/tools.ts new file mode 100644 index 00000000..f2020810 --- /dev/null +++ b/mcp_servers/dropbox/src/tools.ts @@ -0,0 +1,313 @@ +import { zodToJsonSchema } from "zod-to-json-schema"; +import * as schemas from './schemas/index.js'; + +export const toolDefinitions = [ + { + name: "dropbox_list_folder", + title: "Folder Listing", + description: "Lists the contents of a folder", + inputSchema: zodToJsonSchema(schemas.ListFolderSchema), + annotations: { category: "DROPBOX_FOLDER", readOnlyHint: true }, + }, + { + name: "dropbox_list_folder_continue", + title: "Continue Folder Listing", + description: "Continues listing folder contents using a cursor from previous list_folder operation", + inputSchema: zodToJsonSchema(schemas.ListFolderContinueSchema), + annotations: { category: "DROPBOX_FOLDER", readOnlyHint: true }, + }, + { + name: "dropbox_create_folder", + title: "Create New Folder", + description: "Creates a new folder", + inputSchema: zodToJsonSchema(schemas.CreateFolderSchema), + annotations: { category: "DROPBOX_FOLDER" }, + }, + { + name: "dropbox_delete_file", + title: "Delete File or Folder", + description: "Deletes a file or folder", + inputSchema: zodToJsonSchema(schemas.DeleteFileSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_move_file", + title: "Move or Rename", + description: "Moves or renames a file or folder", + inputSchema: zodToJsonSchema(schemas.MoveFileSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_copy_file", + title: "Copy File or Folder", + description: "Creates a copy of a file or folder", + inputSchema: zodToJsonSchema(schemas.CopyFileSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_search_files", + title: "Search Files", + description: "Searches for files and folders", + inputSchema: zodToJsonSchema(schemas.SearchFilesSchema), + annotations: { category: "DROPBOX_FILES", readOnlyHint: true }, + }, + { + name: "dropbox_search_files_continue", + title: "Continue File Search", + description: "Continues searching files using a cursor from previous search_files operation", + inputSchema: zodToJsonSchema(schemas.SearchFilesContinueSchema), + annotations: { category: "DROPBOX_FILES", readOnlyHint: true }, + }, + { + name: "dropbox_get_file_info", + title: "Get File Info", + description: "Gets metadata information about a file or folder", + inputSchema: zodToJsonSchema(schemas.GetFileInfoSchema), + annotations: { category: "DROPBOX_FILES", readOnlyHint: true }, + }, + { + name: "dropbox_share_file", + title: "Create Shared Link", + description: "Creates a shared link for a file or folder. Advanced settings (password protection, expiration dates) require paid Dropbox accounts (Plus/Professional) or team membership. Basic accounts can only use 'public' visibility. If unsure about account capabilities, use 'get_current_account' first to check account type before setting advanced options.", + inputSchema: zodToJsonSchema(schemas.ShareFileSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_get_shared_links", + title: "List Shared Links", + description: "Lists shared links for files and folders", + inputSchema: zodToJsonSchema(schemas.GetSharedLinksSchema), + annotations: { category: "DROPBOX_LINKS", readOnlyHint: true }, + }, + { + name: "dropbox_upload_file", + title: "Upload File", + description: "Uploads a local file to Dropbox using file:// URI. Reads the file directly from the local filesystem and uploads it as binary data.", + inputSchema: zodToJsonSchema(schemas.UploadFileSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_download_file", + title: "Download File", + description: "Downloads a file from Dropbox", + inputSchema: zodToJsonSchema(schemas.DownloadFileSchema), + annotations: { category: "DROPBOX_FILES", readOnlyHint: true }, + }, + { + name: "dropbox_list_revisions", + title: "List File Revisions", + description: "Lists the revisions of a file", + inputSchema: zodToJsonSchema(schemas.ListRevisionsSchema), + annotations: { category: "DROPBOX_METADATA", readOnlyHint: true }, + }, + { + name: "dropbox_restore_file", + title: "Restore File Version", + description: "Restores a file to a previous revision", + inputSchema: zodToJsonSchema(schemas.RestoreFileSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_get_current_account", + title: "Get Account Info", + description: "Gets information about the current account. Returns account details including display name, email, and account ID in the format: 'Account: {display_name}\\nEmail: {email}\\nAccount ID: {account_id}'", + inputSchema: zodToJsonSchema(schemas.GetCurrentAccountSchema), + annotations: { category: "DROPBOX_ACCOUNT", readOnlyHint: true }, + }, + { + name: "dropbox_get_space_usage", + title: "Get Storage Usage", + description: "Gets the current space usage information. Returns storage usage details including used bytes and allocated space in the format: 'Used: {used_bytes} bytes\\nAllocated: {allocated_bytes} bytes' for individual accounts, or 'Used: {used_bytes} bytes\\nAllocation Type: {type}' for other account types", + inputSchema: zodToJsonSchema(schemas.GetSpaceUsageSchema), + annotations: { category: "DROPBOX_ACCOUNT", readOnlyHint: true }, + }, + { + name: "dropbox_get_temporary_link", + title: "Get Temporary Link", + description: "Gets a temporary link to a file", + inputSchema: zodToJsonSchema(schemas.GetTemporaryLinkSchema), + annotations: { category: "DROPBOX_LINKS", readOnlyHint: true }, + }, + { + name: "dropbox_add_file_member", + title: "Add File Member", + description: "Adds a member to a file", + inputSchema: zodToJsonSchema(schemas.AddFileMemberSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_list_file_members", + title: "List File Members", + description: "Lists the members of a file", + inputSchema: zodToJsonSchema(schemas.ListFileMembersSchema), + annotations: { category: "DROPBOX_FILES", readOnlyHint: true }, + }, + { + name: "dropbox_remove_file_member", + title: "Remove File Member", + description: "Removes a member from a file", + inputSchema: zodToJsonSchema(schemas.RemoveFileMemberSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_share_folder", + title: "Share Folder", + description: "Shares a folder", + inputSchema: zodToJsonSchema(schemas.ShareFolderSchema), + annotations: { category: "DROPBOX_FOLDER" }, + }, + { + name: "dropbox_list_folder_members", + title: "List Folder Members", + description: "Lists the members of a shared folder", + inputSchema: zodToJsonSchema(schemas.ListFolderMembersSchema), + annotations: { category: "DROPBOX_FOLDER", readOnlyHint: true }, + }, + { + name: "dropbox_add_folder_member", + title: "Add Folder Member", + description: "Adds a member to a shared folder", + inputSchema: zodToJsonSchema(schemas.AddFolderMemberSchema), + annotations: { category: "DROPBOX_FOLDER" }, + }, + { + name: "dropbox_list_shared_folders", + title: "List Shared Folders", + description: "Lists all shared folders that the current user has access to", + inputSchema: zodToJsonSchema(schemas.ListSharedFoldersSchema), + annotations: { category: "DROPBOX_FOLDER", readOnlyHint: true }, + }, + { + name: "dropbox_unshare_file", + title: "Unshare File", + description: "Remove all members from this file. Does not remove inherited members.", + inputSchema: zodToJsonSchema(schemas.UnshareFileSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_unshare_folder", + title: "Unshare Folder", + description: "Allows a shared folder owner to unshare the folder. You'll need to call check_job_status to determine if the action has completed successfully.", + inputSchema: zodToJsonSchema(schemas.UnshareFolderSchema), + annotations: { category: "DROPBOX_FOLDER" }, + }, + { + name: "dropbox_create_file_request", + title: "Create File Request", + description: "Creates a file request", + inputSchema: zodToJsonSchema(schemas.CreateFileRequestSchema), + annotations: { category: "DROPBOX_FILE_REQUEST" }, + }, + { + name: "dropbox_get_file_request", + title: "Get File Request", + description: "Gets a file request by ID", + inputSchema: zodToJsonSchema(schemas.GetFileRequestSchema), + annotations: { category: "DROPBOX_FILE_REQUEST", readOnlyHint: true }, + }, + { + name: "dropbox_list_file_requests", + title: "List File Requests", + description: "Lists all file requests", + inputSchema: zodToJsonSchema(schemas.ListFileRequestsSchema), + annotations: { category: "DROPBOX_FILE_REQUEST", readOnlyHint: true }, + }, + { + name: "dropbox_delete_file_request", + title: "Delete File Request", + description: "Deletes file requests", + inputSchema: zodToJsonSchema(schemas.DeleteFileRequestSchema), + annotations: { category: "DROPBOX_FILE_REQUEST" }, + }, + { + name: "dropbox_update_file_request", + title: "Update File Request", + description: "Updates a file request (title, destination, description, open/close status)", + inputSchema: zodToJsonSchema(schemas.UpdateFileRequestSchema), + annotations: { category: "DROPBOX_FILE_REQUEST" }, + }, + { + name: "dropbox_batch_delete", + title: "Batch Delete Files", + description: "Deletes multiple files and folders in a single operation. This is an efficient way to delete many items at once. NOTE: This may be an async operation that returns a job ID for status checking. Each entry only needs a 'path' field.", + inputSchema: zodToJsonSchema(schemas.BatchDeleteSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_batch_move", + title: "Batch Move Files", + description: "Moves or renames multiple files and folders in a single operation. This is an efficient way to move many items at once. NOTE: This may be an async operation that returns a job ID for status checking. Each entry needs 'from_path' and 'to_path' fields. Optional top-level 'autorename' and 'allow_ownership_transfer' apply to all entries.", + inputSchema: zodToJsonSchema(schemas.BatchMoveSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_batch_copy", + title: "Batch Copy Files", + description: "Copies multiple files and folders in a single operation. This is an efficient way to copy many items at once. NOTE: This may be an async operation that returns a job ID for status checking. Each entry needs 'from_path' and 'to_path' fields. Optional top-level 'autorename' applies to all entries.", + inputSchema: zodToJsonSchema(schemas.BatchCopySchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_check_batch_job_status", + title: "Check Batch Job Status", + description: "Checks the status of a batch operation using the async job ID returned from batch operations. Use this to monitor progress and get final results of batch_copy, batch_move, or batch_delete operations. The tool automatically detects the operation type.", + inputSchema: zodToJsonSchema(schemas.BatchJobStatusSchema), + annotations: { category: "DROPBOX_METADATA", readOnlyHint: true }, + }, + { + name: "dropbox_get_thumbnail", + title: "Get File Thumbnail", + description: "Gets a thumbnail image for a file and returns it as binary image data with proper MIME type. The response contains the actual image that can be displayed directly by compatible clients.", + inputSchema: zodToJsonSchema(schemas.GetThumbnailSchema), + annotations: { category: "DROPBOX_METADATA", readOnlyHint: true }, + }, + { + name: "dropbox_save_url", + title: "Save URL to Dropbox", + description: "Downloads content from a URL and saves it as a file in Dropbox. This is useful for saving web content, images, documents, etc. directly from URLs.", + inputSchema: zodToJsonSchema(schemas.SaveUrlSchema), + annotations: { category: "DROPBOX_LINKS" }, + }, + { + name: "dropbox_save_url_check_job_status", + title: "Check URL Save Status", + description: "Checks the status of a save URL operation using the async job ID. Use this to monitor the progress of URL downloads.", + inputSchema: zodToJsonSchema(schemas.SaveUrlCheckJobStatusSchema), + annotations: { category: "DROPBOX_METADATA", readOnlyHint: true }, + }, + { + name: "dropbox_lock_file_batch", + title: "Lock Files (Batch)", + description: "Temporarily locks files to prevent them from being edited by others. This is useful during collaborative work to avoid editing conflicts. NOTE: This may be an async operation that returns a job ID for status checking.", + inputSchema: zodToJsonSchema(schemas.LockFileBatchSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_unlock_file_batch", + title: "Unlock Files (Batch)", + description: "Unlocks previously locked files, allowing others to edit them again. NOTE: This may be an async operation that returns a job ID for status checking.", + inputSchema: zodToJsonSchema(schemas.UnlockFileBatchSchema), + annotations: { category: "DROPBOX_FILES" }, + }, + { + name: "dropbox_list_received_files", + title: "List Received Files", + description: "Lists files that have been shared with the current user by others", + inputSchema: zodToJsonSchema(schemas.ListReceivedFilesSchema), + annotations: { category: "DROPBOX_FILES", readOnlyHint: true }, + }, + { + name: "dropbox_check_job_status", + title: "Check Job Status", + description: "Checks the status of an asynchronous operation (like unshare_folder, share_folder, etc.)", + inputSchema: zodToJsonSchema(schemas.CheckJobStatusSchema), + annotations: { category: "DROPBOX_METADATA", readOnlyHint: true }, + }, + { + name: "dropbox_remove_folder_member", + title: "Remove Folder Member", + description: "Removes a member from a shared folder.", + inputSchema: zodToJsonSchema(schemas.RemoveFolderMemberSchema), + annotations: { category: "DROPBOX_FOLDER" }, + }, +]; diff --git a/mcp_servers/dropbox/src/utils/context.ts b/mcp_servers/dropbox/src/utils/context.ts new file mode 100644 index 00000000..01dc3c84 --- /dev/null +++ b/mcp_servers/dropbox/src/utils/context.ts @@ -0,0 +1,16 @@ +import { AsyncLocalStorage } from 'async_hooks'; +import { Dropbox } from 'dropbox'; + +// Create AsyncLocalStorage for request context +export const asyncLocalStorage = new AsyncLocalStorage<{ + dropboxClient: Dropbox | null; +}>(); + +// Helper function to get Dropbox client from context +export function getDropboxClient() { + const client = asyncLocalStorage.getStore()?.dropboxClient; + if (!client) { + throw new Error('Access token is missing. Provide it via x-auth-token header or set DROPBOX_ACCESS_TOKEN in the environment.'); + } + return client; +} diff --git a/mcp_servers/dropbox/src/utils/error-handling.ts b/mcp_servers/dropbox/src/utils/error-handling.ts new file mode 100644 index 00000000..4ed4cf57 --- /dev/null +++ b/mcp_servers/dropbox/src/utils/error-handling.ts @@ -0,0 +1,79 @@ +/** + * Formats Dropbox API errors with detailed information for better debugging + * @param error The error object from Dropbox API + * @param operation The operation that failed (e.g., "upload file", "share folder") + * @param resource The resource being operated on (e.g., file path, folder name) + * @returns Formatted error message with detailed information + */ +export function formatDropboxError(error: any, operation: string, resource?: string): string { + let errorMessage = `Failed to ${operation}`; + if (resource) { + errorMessage += `: "${resource}"`; + } + errorMessage += `\n`; + + // Add detailed API error information + errorMessage += `\nDetailed Error Information:\n`; + errorMessage += `- HTTP Status: ${error.status || 'Unknown'}\n`; + errorMessage += `- Error Summary: ${error.error_summary || 'Not provided'}\n`; + errorMessage += `- Error Message: ${error.message || 'Not provided'}\n`; + + // Add the full error object for debugging if available + if (error.error) { + errorMessage += `- API Error Details: ${JSON.stringify(error.error, null, 2)}\n`; + } + + return errorMessage; +} + +/** + * Adds common HTTP status code explanations to error messages + * @param errorMessage The base error message + * @param error The error object + * @param context Additional context for specific status codes + * @returns Enhanced error message with status-specific guidance + */ +export function addCommonErrorGuidance(errorMessage: string, error: any, context?: { + resource?: string; + operation?: string; + requiresAuth?: boolean; + requiresOwnership?: boolean; +}): string { + const status = error.status; + const resource = context?.resource || 'resource'; + + if (status === 400) { + errorMessage += `\nError 400: Bad request - Invalid parameters or malformed request.\n\nCommon causes:\n- Invalid path format (must start with '/')\n- Invalid parameter values\n- Malformed request data\n- Resource doesn't exist or isn't accessible`; + } else if (status === 401) { + errorMessage += `\nError 401: Unauthorized - Your access token may be invalid or expired.\n\nCheck:\n- Access token is valid and not expired\n- Token has the required permissions`; + if (context?.requiresAuth) { + errorMessage += `\n- Token has the specific scope needed for this operation`; + } + errorMessage += `\n- You're authenticated with the correct Dropbox account`; + } else if (status === 403) { + errorMessage += `\nError 403: Permission denied - You don't have permission for this operation.\n\nThis could mean:\n- You don't own the ${resource}\n- Your access token lacks required permissions`; + if (context?.requiresOwnership) { + errorMessage += `\n- Only the owner can perform this operation`; + } + errorMessage += `\n- The ${resource} has restricted access settings`; + } else if (status === 404) { + errorMessage += `\nError 404: Not found - The ${resource} doesn't exist.\n\nMake sure:\n- The path is correct and starts with '/'\n- The ${resource} exists in your Dropbox\n- You have access to the ${resource}\n- The ${resource} hasn't been moved or deleted`; + } else if (status === 409) { + errorMessage += `\nError 409: Conflict - Operation failed due to a conflict.\n\nCommon causes:\n- Resource already exists\n- Concurrent modifications\n- Operation conflicts with current state\n- Name or path conflicts`; + + // Add specific guidance for sharing operations + if (context?.operation === 'share_file' || context?.operation === 'share_folder') { + errorMessage += `\n\nFor sharing operations specifically:\n- 'settings_error/not_authorized': Advanced settings (password, expiration) require paid Dropbox accounts\n- 'settings_error/invalid_settings': Check that settings combination is valid for your account type\n- Team-only visibility requires team membership\n- Password protection requires Dropbox Plus/Professional`; + } + } else if (status === 429) { + errorMessage += `\nError 429: Too many requests - You're hitting rate limits.\n\nTips:\n- Wait a moment before trying again\n- Reduce the frequency of requests\n- Consider batching operations if available`; + } else if (status === 507) { + errorMessage += `\nError 507: Insufficient storage - Operation would exceed storage limits.`; + } else if (status && status >= 500) { + errorMessage += `\nError ${status}: Server error - Dropbox is experiencing issues.\n\nTry:\n- Waiting a moment and trying again\n- The issue is likely temporary`; + } else if (status) { + errorMessage += `\nError ${status}: ${error.message || error.error_summary || 'Unknown error'}`; + } + + return errorMessage; +} diff --git a/mcp_servers/dropbox/src/utils/error-msg.ts b/mcp_servers/dropbox/src/utils/error-msg.ts new file mode 100644 index 00000000..3bd0a36a --- /dev/null +++ b/mcp_servers/dropbox/src/utils/error-msg.ts @@ -0,0 +1,28 @@ +/** + * Utility functions for error handling in Dropbox MCP module + */ + +import { DropboxMCPError, ErrorTypes, ErrorModules } from '../error.js'; + +/** + * Wraps a get-uri error into our standard error format + * @param error - The original error from get-uri + * @param path - The path that failed + * @throws DropboxMCPError - Always throws, never returns + */ +export function wrapGetUriError(error: unknown, path: string): never { + if (error instanceof Error) { + const code = (error as any).code ?? 'unknown'; + throw new DropboxMCPError( + ErrorTypes.GET_URI_ERROR, + ErrorModules.GET_URI, + `Failed to get URI for path "${path}". Status: ${code}, message: ${error.message}` + ); + } + + throw new DropboxMCPError( + ErrorTypes.GET_URI_ERROR, + ErrorModules.GET_URI, + `Failed to get URI for path "${path}". Unknown error occurred` + ); +} \ No newline at end of file diff --git a/mcp_servers/dropbox/src/utils/fetch-polyfill.ts b/mcp_servers/dropbox/src/utils/fetch-polyfill.ts new file mode 100644 index 00000000..e5b4e4d8 --- /dev/null +++ b/mcp_servers/dropbox/src/utils/fetch-polyfill.ts @@ -0,0 +1,33 @@ +// Polyfill for fetch Response.buffer() method - fixes compatibility with modern Node.js/Bun +// +// This addresses a compatibility issue where the Dropbox SDK expects the fetch Response object +// to have a buffer() method, which was available in older versions of node-fetch but not in +// the native fetch implementation in modern Node.js (18+) or Bun. +// +// The Dropbox SDK uses response.buffer() for file downloads, but native fetch only provides +// response.arrayBuffer(). This polyfill adds the missing buffer() method by converting +// arrayBuffer() results to Node.js Buffer objects. +// +// Related issues: +// - https://github.com/dropbox/dropbox-sdk-js/issues/1135 +// - https://github.com/dropbox/dropbox-sdk-js/pull/1138 (similar fix) +// +// Without this patch, file download operations (like download_file tool) would fail with: +// "TypeError: response.buffer is not a function" +export function patchFetchResponse() { + const originalFetch = global.fetch; + if (originalFetch) { + global.fetch = async function (...args: Parameters) { + const response = await originalFetch.apply(this, args); + + // Add buffer() method if it doesn't exist (for compatibility with Dropbox SDK) + if (!('buffer' in response) && typeof response.arrayBuffer === 'function') { + (response as any).buffer = function () { + return this.arrayBuffer().then((data: ArrayBuffer) => Buffer.from(data)); + }; + } + + return response; + }; + } +} diff --git a/mcp_servers/dropbox/src/utils/index.ts b/mcp_servers/dropbox/src/utils/index.ts new file mode 100644 index 00000000..82724a40 --- /dev/null +++ b/mcp_servers/dropbox/src/utils/index.ts @@ -0,0 +1,16 @@ +/** + * Utility functions for the Dropbox MCP server + */ + +// Context utilities +export { getDropboxClient } from './context.js'; + +// Error handling utilities +export { formatDropboxError } from './error-handling.js'; + +// Path and URL handling utilities +export { + isFolderPath, + dropboxResourceUriToPath, + pathToDropboxResourceUri +} from './path-url-handling.js'; diff --git a/mcp_servers/dropbox/src/utils/path-url-handling.ts b/mcp_servers/dropbox/src/utils/path-url-handling.ts new file mode 100644 index 00000000..b5b53273 --- /dev/null +++ b/mcp_servers/dropbox/src/utils/path-url-handling.ts @@ -0,0 +1,64 @@ +/** + * Utilities for handling Dropbox paths and URLs + */ + +/** + * Checks if a path represents a folder (ends with /) + * @param path - The path to check + * @returns True if the path represents a folder, false otherwise + */ +export function isFolderPath(path: string): boolean { + return path.endsWith('/'); +} + +/** + * Converts a dropbox:// resource URI to a file path + * @param uri - The dropbox:// URI to convert + * @returns The file path + * @throws Error if the URI is invalid + */ +export function dropboxResourceUriToPath(uri: string): string { + if (!uri.startsWith('dropbox://')) { + throw new Error('Invalid dropbox resource URI. Must start with dropbox://'); + } + + let path = uri.replace('dropbox://', '').trimEnd(); + + // Ensure the path starts with / + if (!path.startsWith('/')) { + path = `/${path}`; + } + + return path; +} + +/** + * Converts a file path to a dropbox:// resource URI + * @param path - The file path to convert + * @returns The dropbox:// resource URI + */ +export function pathToDropboxResourceUri(path: string): string { + // Ensure the path starts with / + if (!path.startsWith('/')) { + path = `/${path}`; + } + + return `dropbox://${path.startsWith('/') ? path.slice(1) : path}`; +} + +/** + * Extracts the filename from a file path + * @param path - The file path to extract filename from + * @returns The filename + * @throws Error if the path represents a folder + */ +export function getFilenameFromPath(path: string): string { + if (isFolderPath(path)) { + throw new Error('Cannot extract filename from folder path. Path represents a folder.'); + } + + const parts = path.split('/'); + const filename = parts[parts.length - 1]; + + return filename; +} diff --git a/mcp_servers/dropbox/tsconfig.json b/mcp_servers/dropbox/tsconfig.json new file mode 100644 index 00000000..f14c1b0c --- /dev/null +++ b/mcp_servers/dropbox/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "jsx": "react-jsx", + "outDir": "./build", + "rootDir": ".", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true + }, + "include": ["./**/*"], + "exclude": ["node_modules"] +} diff --git a/mcp_servers/exa/Dockerfile b/mcp_servers/exa/Dockerfile new file mode 100644 index 00000000..84bb4ca4 --- /dev/null +++ b/mcp_servers/exa/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/exa/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/exa/server.py . +COPY mcp_servers/exa/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/exa/README.md b/mcp_servers/exa/README.md new file mode 100644 index 00000000..28d7b4eb --- /dev/null +++ b/mcp_servers/exa/README.md @@ -0,0 +1,73 @@ +# Exa MCP Server + +A Model Context Protocol (MCP) server for Exa (formerly Metaphor) integration. Perform semantic web searches and content discovery using Exa's AI-powered search API. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Exa with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("EXA", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/exa-mcp-server:latest + + +# Run Exa MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/exa-mcp-server:latest +``` + +**API Key Setup:** Get your Exa API key from the [Exa Dashboard](https://dashboard.exa.ai/). + +## šŸ› ļø Available Tools + +- **Semantic Search**: AI-powered semantic web search +- **Content Discovery**: Find relevant content and resources +- **Neural Search**: Advanced neural search capabilities +- **Similar Content**: Find content similar to given URLs or topics +- **Research Tools**: Academic and professional research assistance + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/exa/requirements.txt b/mcp_servers/exa/requirements.txt new file mode 100644 index 00000000..633424c0 --- /dev/null +++ b/mcp_servers/exa/requirements.txt @@ -0,0 +1,7 @@ +mcp==1.11.0 +exa_py==1.15.1 +python-dotenv==1.1.1 +httpx>=0.28.1 +click>=8.2.1 +starlette>=0.47.2 +uvicorn>=0.35.0 diff --git a/mcp_servers/exa/server.py b/mcp_servers/exa/server.py new file mode 100644 index 00000000..77f251e4 --- /dev/null +++ b/mcp_servers/exa/server.py @@ -0,0 +1,631 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import List +from contextvars import ContextVar + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + auth_token_context, + exa_search, + exa_get_contents, + exa_find_similar, + exa_answer, + exa_research +) + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +EXA_MCP_SERVER_PORT = int(os.getenv("EXA_MCP_SERVER_PORT", "5000")) + +# Context variable to store the API key for each request +api_key_context: ContextVar[str] = ContextVar('api_key') + +def extract_api_key(request_or_scope) -> str: + """Extract API key from headers or environment.""" + api_key = os.getenv("API_KEY") + auth_data = None + + if not api_key: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if auth_data: + try: + # Parse the JSON auth data to extract token + auth_json = json.loads(auth_data) + api_key = auth_json.get('token') or auth_json.get('api_key') or '' + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + api_key = "" + + return api_key or "" + +def get_api_key() -> str: + """Get the API key from context.""" + try: + return api_key_context.get() + except LookupError: + raise RuntimeError("API key not found in request context") + +@click.command() +@click.option("--port", default=EXA_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("exa-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="exa_search", + description=""" + Search the web for content using AI-powered semantic search or traditional keyword search. + + Use this tool when you need to find web pages, articles, or content related to a topic. + Exa's neural search understands meaning and context, making it excellent for research and content discovery. + Returns search results with URLs, titles, scores, and optional content text. + """, + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Required. The search query to find relevant web content." + }, + "num_results": { + "type": "integer", + "description": "Number of results to return (max 1000, default 10)." + }, + "include_domains": { + "type": "array", + "items": {"type": "string"}, + "description": "List of domains to include in search results (e.g., ['reddit.com', 'stackoverflow.com'])." + }, + "exclude_domains": { + "type": "array", + "items": {"type": "string"}, + "description": "List of domains to exclude from search results." + }, + "start_crawl_date": { + "type": "string", + "description": "Start date for crawl date filter (YYYY-MM-DD format)." + }, + "end_crawl_date": { + "type": "string", + "description": "End date for crawl date filter (YYYY-MM-DD format)." + }, + "start_published_date": { + "type": "string", + "description": "Start date for published date filter (YYYY-MM-DD format)." + }, + "end_published_date": { + "type": "string", + "description": "End date for published date filter (YYYY-MM-DD format)." + }, + "use_autoprompt": { + "type": "boolean", + "description": "Whether to use Exa's autoprompt feature to optimize the search query (default true)." + }, + "type": { + "type": "string", + "enum": ["neural", "keyword"], + "description": "Search type: 'neural' for AI-powered semantic search or 'keyword' for traditional keyword search." + }, + "category": { + "type": "string", + "description": "Category filter for results (e.g., 'news', 'research', 'company')." + }, + "include_text": { + "type": "array", + "items": {"type": "string"}, + "description": "Text patterns that must be included in the results." + }, + "exclude_text": { + "type": "array", + "items": {"type": "string"}, + "description": "Text patterns to exclude from the results." + } + }, + "required": ["query"] + }, + annotations=types.ToolAnnotations(**{"category": "EXA_SEARCH", "readOnlyHint": True}) + ), + + types.Tool( + name="exa_get_contents", + description=""" + Get the full text content of web pages using their Exa search result IDs. + + Use this tool after performing a search to retrieve the actual text content from specific results. + Returns clean, parsed content with optional highlighting and summarization features. + Essential for reading the full content of interesting search results. + """, + inputSchema={ + "type": "object", + "properties": { + "ids": { + "type": "array", + "items": {"type": "string"}, + "description": "Required. List of Exa result IDs to get contents for." + }, + "text": { + "type": "boolean", + "description": "Whether to include text content (default true)." + }, + "highlights": { + "type": "object", + "properties": { + "query": {"type": "string"}, + "num_sentences": {"type": "integer"} + }, + "description": "Highlighting options with query and number of sentences to highlight." + }, + "summary": { + "type": "object", + "properties": { + "query": {"type": "string"} + }, + "description": "Summary options with query for generating summaries." + } + }, + "required": ["ids"] + }, + annotations=types.ToolAnnotations(**{"category": "EXA_CONTENT", "readOnlyHint": True}) + ), + + types.Tool( + name="exa_find_similar", + description=""" + Discover web pages similar in meaning and content to a given URL. + + Use this tool when you have a specific webpage and want to find other pages with similar topics, + themes, or content. Perfect for content discovery, finding related articles, or expanding research + around a specific source. Returns semantically similar pages with relevance scores. + """, + inputSchema={ + "type": "object", + "properties": { + "url": { + "type": "string", + "description": "Required. The URL to find similar pages for." + }, + "num_results": { + "type": "integer", + "description": "Number of similar results to return (max 1000, default 10)." + }, + "include_domains": { + "type": "array", + "items": {"type": "string"}, + "description": "List of domains to include in search results." + }, + "exclude_domains": { + "type": "array", + "items": {"type": "string"}, + "description": "List of domains to exclude from search results." + }, + "start_crawl_date": { + "type": "string", + "description": "Start date for crawl date filter (YYYY-MM-DD format)." + }, + "end_crawl_date": { + "type": "string", + "description": "End date for crawl date filter (YYYY-MM-DD format)." + }, + "start_published_date": { + "type": "string", + "description": "Start date for published date filter (YYYY-MM-DD format)." + }, + "end_published_date": { + "type": "string", + "description": "End date for published date filter (YYYY-MM-DD format)." + }, + "exclude_source_domain": { + "type": "boolean", + "description": "Whether to exclude results from the same domain as the source URL (default true)." + }, + "category": { + "type": "string", + "description": "Category filter for results." + }, + "include_text": { + "type": "array", + "items": {"type": "string"}, + "description": "Text patterns that must be included in the results." + }, + "exclude_text": { + "type": "array", + "items": {"type": "string"}, + "description": "Text patterns to exclude from the results." + } + }, + "required": ["url"] + }, + annotations=types.ToolAnnotations(**{"category": "EXA_DISCOVERY", "readOnlyHint": True}) + ), + + types.Tool( + name="exa_answer", + description=""" + Get a direct answer to a specific question by searching and analyzing web sources. + + Use this tool when you need a focused answer to a specific question rather than general search results. + The tool searches relevant sources and provides a structured response with citations. + Ideal for fact-finding, research questions, and getting quick, sourced answers. + """, + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Required. The question to get a direct answer for." + }, + "include_domains": { + "type": "array", + "items": {"type": "string"}, + "description": "List of domains to include in the search for answering." + }, + "exclude_domains": { + "type": "array", + "items": {"type": "string"}, + "description": "List of domains to exclude from the search for answering." + }, + "start_crawl_date": { + "type": "string", + "description": "Start date for crawl date filter (YYYY-MM-DD format)." + }, + "end_crawl_date": { + "type": "string", + "description": "End date for crawl date filter (YYYY-MM-DD format)." + }, + "start_published_date": { + "type": "string", + "description": "Start date for published date filter (YYYY-MM-DD format)." + }, + "end_published_date": { + "type": "string", + "description": "End date for published date filter (YYYY-MM-DD format)." + }, + "use_autoprompt": { + "type": "boolean", + "description": "Whether to use Exa's autoprompt feature (default true)." + }, + "type": { + "type": "string", + "enum": ["neural", "keyword"], + "description": "Search type: 'neural' for AI-powered search or 'keyword' for traditional search." + }, + "category": { + "type": "string", + "description": "Category filter for results." + }, + "include_text": { + "type": "array", + "items": {"type": "string"}, + "description": "Text patterns that must be included in search sources." + }, + "exclude_text": { + "type": "array", + "items": {"type": "string"}, + "description": "Text patterns to exclude from search sources." + } + }, + "required": ["query"] + }, + annotations=types.ToolAnnotations(**{"category": "EXA_QA", "readOnlyHint": True}) + ), + + types.Tool( + name="exa_research", + description=""" + Conduct comprehensive research on a topic with multiple sources and structured analysis. + + Use this tool for in-depth research projects that require gathering information from multiple + high-quality sources. Returns structured results with detailed content, citations, and analysis. + Perfect for academic research, market analysis, or thorough investigation of complex topics. + """, + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Required. The research topic or question to investigate comprehensively." + }, + "num_results": { + "type": "integer", + "description": "Number of sources to include in research (max 1000, default 10)." + }, + "include_domains": { + "type": "array", + "items": {"type": "string"}, + "description": "List of domains to prioritize in research sources." + }, + "exclude_domains": { + "type": "array", + "items": {"type": "string"}, + "description": "List of domains to exclude from research sources." + }, + "start_crawl_date": { + "type": "string", + "description": "Start date for source date filter (YYYY-MM-DD format)." + }, + "end_crawl_date": { + "type": "string", + "description": "End date for source date filter (YYYY-MM-DD format)." + }, + "start_published_date": { + "type": "string", + "description": "Start date for published date filter (YYYY-MM-DD format)." + }, + "end_published_date": { + "type": "string", + "description": "End date for published date filter (YYYY-MM-DD format)." + }, + "use_autoprompt": { + "type": "boolean", + "description": "Whether to use Exa's autoprompt optimization (default true)." + }, + "type": { + "type": "string", + "enum": ["neural", "keyword"], + "description": "Research search type: 'neural' for AI-powered or 'keyword' for traditional." + }, + "category": { + "type": "string", + "description": "Category focus for research sources." + }, + "include_text": { + "type": "array", + "items": {"type": "string"}, + "description": "Text patterns that sources must contain." + }, + "exclude_text": { + "type": "array", + "items": {"type": "string"}, + "description": "Text patterns to exclude from sources." + } + }, + "required": ["query"] + }, + annotations=types.ToolAnnotations(**{"category": "EXA_RESEARCH", "readOnlyHint": True}) + ) + ] + + @app.call_tool() + async def call_tool( + name: str, + arguments: dict + ) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + if name == "exa_search": + try: + result = await exa_search( + query=arguments["query"], + num_results=arguments.get("num_results", 10), + include_domains=arguments.get("include_domains"), + exclude_domains=arguments.get("exclude_domains"), + start_crawl_date=arguments.get("start_crawl_date"), + end_crawl_date=arguments.get("end_crawl_date"), + start_published_date=arguments.get("start_published_date"), + end_published_date=arguments.get("end_published_date"), + use_autoprompt=arguments.get("use_autoprompt", True), + type=arguments.get("type", "neural"), + category=arguments.get("category"), + include_text=arguments.get("include_text"), + exclude_text=arguments.get("exclude_text") + ) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception(f"Error in exa_search: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + elif name == "exa_get_contents": + try: + result = await exa_get_contents( + ids=arguments["ids"], + text=arguments.get("text", True), + highlights=arguments.get("highlights"), + summary=arguments.get("summary") + ) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception(f"Error in exa_get_contents: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + elif name == "exa_find_similar": + try: + result = await exa_find_similar( + url=arguments["url"], + num_results=arguments.get("num_results", 10), + include_domains=arguments.get("include_domains"), + exclude_domains=arguments.get("exclude_domains"), + start_crawl_date=arguments.get("start_crawl_date"), + end_crawl_date=arguments.get("end_crawl_date"), + start_published_date=arguments.get("start_published_date"), + end_published_date=arguments.get("end_published_date"), + exclude_source_domain=arguments.get("exclude_source_domain", True), + category=arguments.get("category"), + include_text=arguments.get("include_text"), + exclude_text=arguments.get("exclude_text") + ) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception(f"Error in exa_find_similar: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + elif name == "exa_answer": + try: + result = await exa_answer( + query=arguments["query"], + include_domains=arguments.get("include_domains"), + exclude_domains=arguments.get("exclude_domains"), + start_crawl_date=arguments.get("start_crawl_date"), + end_crawl_date=arguments.get("end_crawl_date"), + start_published_date=arguments.get("start_published_date"), + end_published_date=arguments.get("end_published_date"), + use_autoprompt=arguments.get("use_autoprompt", True), + type=arguments.get("type", "neural"), + category=arguments.get("category"), + include_text=arguments.get("include_text"), + exclude_text=arguments.get("exclude_text") + ) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception(f"Error in exa_answer: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + elif name == "exa_research": + try: + result = await exa_research( + query=arguments["query"], + num_results=arguments.get("num_results", 10), + include_domains=arguments.get("include_domains"), + exclude_domains=arguments.get("exclude_domains"), + start_crawl_date=arguments.get("start_crawl_date"), + end_crawl_date=arguments.get("end_crawl_date"), + start_published_date=arguments.get("start_published_date"), + end_published_date=arguments.get("end_published_date"), + use_autoprompt=arguments.get("use_autoprompt", True), + type=arguments.get("type", "neural"), + category=arguments.get("category"), + include_text=arguments.get("include_text"), + exclude_text=arguments.get("exclude_text") + ) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception(f"Error in exa_research: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + else: + return [types.TextContent(type="text", text=f"Unknown tool: {name}")] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract API key from headers + api_key = extract_api_key(request) + + # Set the API key in context for this request + token = api_key_context.set(api_key) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + api_key_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract API key from headers + api_key = extract_api_key(scope) + + # Set the API key in context for this request + token = api_key_context.set(api_key) + try: + await session_manager.handle_request(scope, receive, send) + finally: + api_key_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + + +if __name__ == "__main__": + main() diff --git a/mcp_servers/exa/tools/__init__.py b/mcp_servers/exa/tools/__init__.py new file mode 100644 index 00000000..0f948344 --- /dev/null +++ b/mcp_servers/exa/tools/__init__.py @@ -0,0 +1,20 @@ +from .base import ( + auth_token_context +) + +from .search import ( + exa_search, + exa_get_contents, + exa_find_similar, + exa_answer, + exa_research +) + +__all__ = [ + "auth_token_context", + "exa_search", + "exa_get_contents", + "exa_find_similar", + "exa_answer", + "exa_research" +] diff --git a/mcp_servers/exa/tools/base.py b/mcp_servers/exa/tools/base.py new file mode 100644 index 00000000..c274109c --- /dev/null +++ b/mcp_servers/exa/tools/base.py @@ -0,0 +1,49 @@ +import logging +import os +from contextvars import ContextVar +from typing import Optional +from dotenv import load_dotenv +from exa_py import Exa + +# Load env vars from .env +load_dotenv() + +logger = logging.getLogger(__name__) + +# Context variable to store the auth token per request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + """ + Get the Exa API token from context or fallback to env. + """ + try: + token = auth_token_context.get() + if not token: + # Fallback to environment variable + token = os.getenv("EXA_API_KEY") + logger.debug(f"Using token from environment: {'***' if token else 'None'}") + if not token: + raise RuntimeError("No Exa auth token found in context or environment") + return token + except LookupError: + # Context variable not set at all + token = os.getenv("EXA_API_KEY") + if not token: + raise RuntimeError("No Exa auth token found in context or environment") + return token + +def get_exa_client() -> Optional[Exa]: + """ + Return an Exa client instance ready to use. + """ + try: + auth_token = get_auth_token() + client = Exa(api_key=auth_token) + return client + except RuntimeError as e: + logger.warning(f"Failed to get Exa auth token: {e}") + return None + except Exception as e: + logger.error(f"Failed to initialize Exa client: {e}") + return None diff --git a/mcp_servers/exa/tools/search.py b/mcp_servers/exa/tools/search.py new file mode 100644 index 00000000..d9840152 --- /dev/null +++ b/mcp_servers/exa/tools/search.py @@ -0,0 +1,464 @@ +import logging +from typing import List, Optional, Dict, Any +from .base import get_exa_client + +# Configure logging +logger = logging.getLogger(__name__) + + +async def exa_search( + query: str, + num_results: int = 10, + include_domains: Optional[List[str]] = None, + exclude_domains: Optional[List[str]] = None, + start_crawl_date: Optional[str] = None, + end_crawl_date: Optional[str] = None, + start_published_date: Optional[str] = None, + end_published_date: Optional[str] = None, + use_autoprompt: bool = True, + type: str = "neural", + category: Optional[str] = None, + include_text: Optional[List[str]] = None, + exclude_text: Optional[List[str]] = None +) -> Dict[str, Any]: + """ + Perform an Exa search query using neural or keyword search. + + Args: + query (str): [Required] The search query. + num_results (int): Number of results to return (max 1000, default 10). + include_domains (List[str]): Domains to include in search results. + exclude_domains (List[str]): Domains to exclude from search results. + start_crawl_date (str): Start date for crawl date filter (YYYY-MM-DD). + end_crawl_date (str): End date for crawl date filter (YYYY-MM-DD). + start_published_date (str): Start date for published date filter (YYYY-MM-DD). + end_published_date (str): End date for published date filter (YYYY-MM-DD). + use_autoprompt (bool): Whether to use autoprompt (default True). + type (str): Search type - 'neural' or 'keyword' (default 'neural'). + category (str): Category filter for results. + include_text (List[str]): Text patterns to include in results. + exclude_text (List[str]): Text patterns to exclude from results. + + Returns: + Dict[str, Any]: Search results with URLs, titles, and metadata. + """ + try: + client = get_exa_client() + if not client: + return {"error": "Could not initialize Exa client"} + + # Build search parameters + search_params = { + "query": query, + "num_results": num_results, + "use_autoprompt": use_autoprompt, + "type": type + } + + # Add optional parameters if provided + if include_domains: + search_params["include_domains"] = include_domains + if exclude_domains: + search_params["exclude_domains"] = exclude_domains + if start_crawl_date: + search_params["start_crawl_date"] = start_crawl_date + if end_crawl_date: + search_params["end_crawl_date"] = end_crawl_date + if start_published_date: + search_params["start_published_date"] = start_published_date + if end_published_date: + search_params["end_published_date"] = end_published_date + if category: + search_params["category"] = category + if include_text: + search_params["include_text"] = include_text + if exclude_text: + search_params["exclude_text"] = exclude_text + + logger.info(f"Sending Exa search request: {query}") + result = client.search_and_contents(**search_params) + + # Convert to dict for JSON serialization + response = { + "results": [ + { + "url": r.url, + "title": r.title, + "id": r.id, + "score": r.score, + "published_date": r.published_date, + "author": r.author, + "text": r.text if hasattr(r, 'text') else None + } + for r in result.results + ], + "autoprompt_string": result.autoprompt_string if hasattr(result, 'autoprompt_string') else None + } + + logger.info("Received Exa search response") + return response + + except Exception as e: + logger.error(f"Exa search failed: {e}") + return {"error": f"Could not complete Exa search for query: {query}. Error: {str(e)}"} + + +async def exa_get_contents( + ids: List[str], + text: bool = True, + highlights: Optional[Dict[str, Any]] = None, + summary: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """ + Get contents for specific Exa search result IDs. + + Args: + ids (List[str]): [Required] List of Exa result IDs to get contents for. + text (bool): Whether to include text content (default True). + highlights (Dict): Highlighting options with query and num_sentences. + summary (Dict): Summary options with query. + + Returns: + Dict[str, Any]: Contents for the specified IDs. + """ + try: + client = get_exa_client() + if not client: + return {"error": "Could not initialize Exa client"} + + # Build parameters + params = { + "ids": ids, + "text": text + } + + if highlights: + params["highlights"] = highlights + if summary: + params["summary"] = summary + + logger.info(f"Getting Exa contents for {len(ids)} IDs") + result = client.get_contents(**params) + + # Convert to dict for JSON serialization + response = { + "results": [ + { + "url": r.url, + "title": r.title, + "id": r.id, + "text": r.text if hasattr(r, 'text') and text else None, + "highlights": r.highlights if hasattr(r, 'highlights') else None, + "summary": r.summary if hasattr(r, 'summary') else None, + "published_date": r.published_date, + "author": r.author + } + for r in result.results + ] + } + + logger.info("Received Exa contents response") + return response + + except Exception as e: + logger.error(f"Exa get contents failed: {e}") + return {"error": f"Could not get Exa contents for IDs: {ids}. Error: {str(e)}"} + + +async def exa_find_similar( + url: str, + num_results: int = 10, + include_domains: Optional[List[str]] = None, + exclude_domains: Optional[List[str]] = None, + start_crawl_date: Optional[str] = None, + end_crawl_date: Optional[str] = None, + start_published_date: Optional[str] = None, + end_published_date: Optional[str] = None, + exclude_source_domain: bool = True, + category: Optional[str] = None, + include_text: Optional[List[str]] = None, + exclude_text: Optional[List[str]] = None +) -> Dict[str, Any]: + """ + Find pages similar to a given URL. + + Args: + url (str): [Required] The URL to find similar pages for. + num_results (int): Number of results to return (max 1000, default 10). + include_domains (List[str]): Domains to include in search results. + exclude_domains (List[str]): Domains to exclude from search results. + start_crawl_date (str): Start date for crawl date filter (YYYY-MM-DD). + end_crawl_date (str): End date for crawl date filter (YYYY-MM-DD). + start_published_date (str): Start date for published date filter (YYYY-MM-DD). + end_published_date (str): End date for published date filter (YYYY-MM-DD). + exclude_source_domain (bool): Whether to exclude the source domain (default True). + category (str): Category filter for results. + include_text (List[str]): Text patterns to include in results. + exclude_text (List[str]): Text patterns to exclude from results. + + Returns: + Dict[str, Any]: Similar pages with URLs, titles, and metadata. + """ + try: + client = get_exa_client() + if not client: + return {"error": "Could not initialize Exa client"} + + # Build parameters + params = { + "url": url, + "num_results": num_results, + "exclude_source_domain": exclude_source_domain + } + + # Add optional parameters if provided + if include_domains: + params["include_domains"] = include_domains + if exclude_domains: + params["exclude_domains"] = exclude_domains + if start_crawl_date: + params["start_crawl_date"] = start_crawl_date + if end_crawl_date: + params["end_crawl_date"] = end_crawl_date + if start_published_date: + params["start_published_date"] = start_published_date + if end_published_date: + params["end_published_date"] = end_published_date + if category: + params["category"] = category + if include_text: + params["include_text"] = include_text + if exclude_text: + params["exclude_text"] = exclude_text + + logger.info(f"Finding similar pages to: {url}") + result = client.find_similar(**params) + + # Convert to dict for JSON serialization + response = { + "results": [ + { + "url": r.url, + "title": r.title, + "id": r.id, + "score": r.score, + "published_date": r.published_date, + "author": r.author + } + for r in result.results + ] + } + + logger.info("Received Exa find similar response") + return response + + except Exception as e: + logger.error(f"Exa find similar failed: {e}") + return {"error": f"Could not find similar pages for URL: {url}. Error: {str(e)}"} + + +async def exa_answer( + query: str, + include_domains: Optional[List[str]] = None, + exclude_domains: Optional[List[str]] = None, + start_crawl_date: Optional[str] = None, + end_crawl_date: Optional[str] = None, + start_published_date: Optional[str] = None, + end_published_date: Optional[str] = None, + use_autoprompt: bool = True, + type: str = "neural", + category: Optional[str] = None, + include_text: Optional[List[str]] = None, + exclude_text: Optional[List[str]] = None +) -> Dict[str, Any]: + """ + Get a direct answer to a question by performing a search and extracting key information. + + Note: This implements answer functionality using search_and_contents since the Answer API + endpoint may not be available in the current exa_py version. + + Args: + query (str): [Required] The question to answer. + include_domains (List[str]): Domains to include in search. + exclude_domains (List[str]): Domains to exclude from search. + start_crawl_date (str): Start date for crawl date filter (YYYY-MM-DD). + end_crawl_date (str): End date for crawl date filter (YYYY-MM-DD). + start_published_date (str): Start date for published date filter (YYYY-MM-DD). + end_published_date (str): End date for published date filter (YYYY-MM-DD). + use_autoprompt (bool): Whether to use autoprompt (default True). + type (str): Search type - 'neural' or 'keyword' (default 'neural'). + category (str): Category filter for results. + include_text (List[str]): Text patterns to include in results. + exclude_text (List[str]): Text patterns to exclude from results. + + Returns: + Dict[str, Any]: Search results formatted as an answer with sources. + """ + try: + client = get_exa_client() + if not client: + return {"error": "Could not initialize Exa client"} + + # Build search parameters + search_params = { + "query": query, + "num_results": 5, # Limit to top 5 for answer generation + "use_autoprompt": use_autoprompt, + "type": type + } + + # Add optional parameters if provided + if include_domains: + search_params["include_domains"] = include_domains + if exclude_domains: + search_params["exclude_domains"] = exclude_domains + if start_crawl_date: + search_params["start_crawl_date"] = start_crawl_date + if end_crawl_date: + search_params["end_crawl_date"] = end_crawl_date + if start_published_date: + search_params["start_published_date"] = start_published_date + if end_published_date: + search_params["end_published_date"] = end_published_date + if category: + search_params["category"] = category + if include_text: + search_params["include_text"] = include_text + if exclude_text: + search_params["exclude_text"] = exclude_text + + logger.info(f"Getting Exa answer-style search for: {query}") + result = client.search_and_contents(**search_params) + + # Format as answer-style response + response = { + "query": query, + "answer_type": "search_based", + "note": "This answer is generated from search results. For the best answers, review the sources provided.", + "sources": [ + { + "url": r.url, + "title": r.title, + "id": r.id, + "score": r.score, + "published_date": r.published_date, + "author": r.author, + "text": r.text[:500] + "..." if hasattr(r, 'text') and r.text and len(r.text) > 500 else r.text if hasattr(r, 'text') else None + } + for r in result.results + ], + "autoprompt_string": result.autoprompt_string if hasattr(result, 'autoprompt_string') else None, + "total_sources": len(result.results) + } + + logger.info("Received Exa answer-style response") + return response + + except Exception as e: + logger.error(f"Exa answer failed: {e}") + return {"error": f"Could not get Exa answer for query: {query}. Error: {str(e)}"} + + +async def exa_research( + query: str, + num_results: int = 10, + include_domains: Optional[List[str]] = None, + exclude_domains: Optional[List[str]] = None, + start_crawl_date: Optional[str] = None, + end_crawl_date: Optional[str] = None, + start_published_date: Optional[str] = None, + end_published_date: Optional[str] = None, + use_autoprompt: bool = True, + type: str = "neural", + category: Optional[str] = None, + include_text: Optional[List[str]] = None, + exclude_text: Optional[List[str]] = None +) -> Dict[str, Any]: + """ + Automate in-depth web research and receive structured JSON results with citations. + + Args: + query (str): [Required] The research query. + num_results (int): Number of results to return (max 1000, default 10). + include_domains (List[str]): Domains to include in search. + exclude_domains (List[str]): Domains to exclude from search. + start_crawl_date (str): Start date for crawl date filter (YYYY-MM-DD). + end_crawl_date (str): End date for crawl date filter (YYYY-MM-DD). + start_published_date (str): Start date for published date filter (YYYY-MM-DD). + end_published_date (str): End date for published date filter (YYYY-MM-DD). + use_autoprompt (bool): Whether to use autoprompt (default True). + type (str): Search type - 'neural' or 'keyword' (default 'neural'). + category (str): Category filter for results. + include_text (List[str]): Text patterns to include in results. + exclude_text (List[str]): Text patterns to exclude from results. + + Returns: + Dict[str, Any]: Structured research results with detailed analysis and citations. + """ + try: + client = get_exa_client() + if not client: + return {"error": "Could not initialize Exa client"} + + # For research, we'll combine search and content retrieval for comprehensive results + # Build search parameters + search_params = { + "query": query, + "num_results": num_results, + "use_autoprompt": use_autoprompt, + "type": type + } + + # Add optional parameters if provided + if include_domains: + search_params["include_domains"] = include_domains + if exclude_domains: + search_params["exclude_domains"] = exclude_domains + if start_crawl_date: + search_params["start_crawl_date"] = start_crawl_date + if end_crawl_date: + search_params["end_crawl_date"] = end_crawl_date + if start_published_date: + search_params["start_published_date"] = start_published_date + if end_published_date: + search_params["end_published_date"] = end_published_date + if category: + search_params["category"] = category + if include_text: + search_params["include_text"] = include_text + if exclude_text: + search_params["exclude_text"] = exclude_text + + logger.info(f"Conducting Exa research for: {query}") + + # Get search results with content + result = client.search_and_contents(**search_params) + + # Structure the research response + response = { + "query": query, + "research_summary": f"Research conducted on '{query}' yielding {len(result.results)} sources", + "sources": [ + { + "url": r.url, + "title": r.title, + "id": r.id, + "score": r.score, + "published_date": r.published_date, + "author": r.author, + "text": r.text if hasattr(r, 'text') else None, + "relevance_score": r.score + } + for r in result.results + ], + "autoprompt_string": result.autoprompt_string if hasattr(result, 'autoprompt_string') else None, + "total_sources": len(result.results), + "research_timestamp": "Generated via Exa Research API" + } + + logger.info("Completed Exa research") + return response + + except Exception as e: + logger.error(f"Exa research failed: {e}") + return {"error": f"Could not complete Exa research for query: {query}. Error: {str(e)}"} diff --git a/mcp_servers/figma/.env.example b/mcp_servers/figma/.env.example new file mode 100644 index 00000000..522c13de --- /dev/null +++ b/mcp_servers/figma/.env.example @@ -0,0 +1,5 @@ +# Figma API Configuration +FIGMA_API_KEY=your_figma_personal_access_token_here + +# MCP Server Configuration +FIGMA_MCP_SERVER_PORT=5002 \ No newline at end of file diff --git a/mcp_servers/figma/Dockerfile b/mcp_servers/figma/Dockerfile new file mode 100644 index 00000000..a7b12a65 --- /dev/null +++ b/mcp_servers/figma/Dockerfile @@ -0,0 +1,33 @@ +# Use Python 3.12 slim image +FROM python:3.12-slim + +# Set working directory +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + build-essential \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements first to leverage Docker cache +COPY mcp_servers/figma/requirements.txt . + +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the Figma MCP server code +COPY mcp_servers/figma/ . + +# Copy environment file (optional, can be overridden at runtime) +COPY mcp_servers/figma/.env.example .env + +# Expose the port +EXPOSE 5002 + +# Set environment variables +ENV PYTHONPATH=/app +ENV PYTHONUNBUFFERED=1 + +# Run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/figma/README.md b/mcp_servers/figma/README.md new file mode 100644 index 00000000..45afe8c4 --- /dev/null +++ b/mcp_servers/figma/README.md @@ -0,0 +1,504 @@ +# Figma MCP Server + +[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) +[![Python: 3.12+](https://img.shields.io/badge/Python-3.12+-blue.svg)](https://www.python.org/downloads/) +[![FastAPI](https://img.shields.io/badge/FastAPI-0.100.0+-00a393.svg)](https://fastapi.tiangolo.com/) +[![Figma API](https://img.shields.io/badge/Figma_API-REST-FF7262.svg)](https://www.figma.com/developers/api) + +## šŸ“– Overview + +Figma MCP Server is a Model Context Protocol (MCP) implementation that bridges language models and other applications with Figma's REST API. It provides a standardized interface for executing design operations through various tools defined by the MCP standard. + +## šŸš€ Features + +This server provides comprehensive design workflow capabilities through MCP tools: + +### Authentication & Account Management +| Tool | Description | +|------|-------------| +| `figma_test_connection` | Test API connection and verify authentication | +| `figma_get_current_user` | Get authenticated user information and profile | + +### File Management +| Tool | Description | +|------|-------------| +| `figma_get_file` | Get complete file content including document structure | +| `figma_get_file_nodes` | Get specific nodes from a file | +| `figma_get_file_images` | Export images from file nodes in various formats | +| `figma_get_file_versions` | Get file version history and metadata | + +### Project Management (āš ļø Limited) +| Tool | Description | +|------|-------------| +| `figma_get_team_projects` | Get projects for a team (requires team ID) | +| `figma_get_project_files` | Get files in a project (requires project ID) | + +### Comment Management +| Tool | Description | +|------|-------------| +| `figma_get_file_comments` | Get comments from a file | +| `figma_post_file_comment` | Add comments to files and nodes | +| `figma_delete_comment` | Remove comments from files | + +### Variables (Design Tokens) +| Tool | Description | +|------|-------------| +| `figma_get_local_variables` | Get local variables from a file | +| `figma_get_published_variables` | Get published variables (design tokens) | +| `figma_post_variables` | Create or update variables and collections | + +### Dev Resources +| Tool | Description | +|------|-------------| +| `figma_get_dev_resources` | Get dev resources attached to file nodes | +| `figma_post_dev_resources` | Create new dev resources | +| `figma_put_dev_resource` | Update existing dev resources | +| `figma_delete_dev_resource` | Remove dev resources | + +### Webhooks +| Tool | Description | +|------|-------------| +| `figma_get_team_webhooks` | Get webhooks for a team (requires team ID) | +| `figma_post_webhook` | Create webhooks for team events | +| `figma_put_webhook` | Update webhook configurations | +| `figma_delete_webhook` | Remove webhooks | + +### Library Analytics +| Tool | Description | +|------|-------------| +| `figma_get_library_analytics` | Get usage analytics for published libraries | + +## āš ļø Important Limitations + +**Please read this carefully before using the Figma MCP Server:** + +### Major API Limitations +1. **No Team/Project Discovery**: The Figma API does not provide endpoints to list teams or projects that a user has access to +2. **Manual ID Collection Required**: Users must manually provide: + - **Team IDs** (from team URLs: `https://www.figma.com/files/team/{team_id}/`) + - **Project IDs** (from project pages) + - **File Keys** (from file URLs: `https://www.figma.com/file/{file_key}/filename`) + +### Workarounds +- **File Keys**: Extract from Figma URLs: `https://www.figma.com/file/ABC123DEF456/My-Design` → file_key is `ABC123DEF456` +- **Team IDs**: Found in team URLs: `https://www.figma.com/files/team/123456789/` → team_id is `123456789` +- **Project IDs**: Available through the team projects endpoint once you have a team ID + +## šŸ“§ Prerequisites + +You'll need one of the following: + +- **Docker:** Docker installed and running (recommended) +- **Python:** Python 3.12+ with pip + +## āš™ļø Setup & Configuration + +### Figma API Key Setup + +1. **Log into your Figma account**: + - Navigate to [Figma Settings](https://www.figma.com/settings) + - Go to "Account" tab + +2. **Generate a Personal Access Token**: + - Scroll down to "Personal access tokens" + - Click "Create new token" + - Give your token a descriptive name (e.g., "MCP Server Integration") + - Select appropriate scopes (see below) + - Click "Create token" + - **Important**: Copy the generated token immediately and store it securely + - You won't be able to see the full token again after this step + +3. **Required Scopes**: + For full functionality, your token should have these scopes: + - `files:read` - Read file content + - `file_variables:read` - Read variables + - `file_variables:write` - Write variables + - `file_dev_resources:read` - Read dev resources + - `file_dev_resources:write` - Write dev resources + - `file_comments:read` - Read comments + - `file_comments:write` - Write comments + - `library_analytics:read` - Read library analytics + - `webhooks:write` - Manage webhooks + +### Environment Configuration + +1. **Create your environment file**: + ```bash + cp .env.example .env + ``` + +2. **Edit the `.env` file** with your Figma credentials: + ``` + FIGMA_API_KEY=your_actual_figma_personal_access_token_here + FIGMA_MCP_SERVER_PORT=5002 + ``` + + **Important**: Replace `your_actual_figma_personal_access_token_here` with your actual token from step 2 above. + +## šŸƒā€ā™‚ļø Running the Server + +### Option 1: Docker (Recommended) + +The Docker build must be run from the project root directory (`klavis/`): + +```bash +# Navigate to the root directory of the project +cd /path/to/klavis + +# Build the Docker image +docker build -t figma-mcp-server -f mcp_servers/figma/Dockerfile . + +# Run the container +docker run -d -p 5002:5002 --name figma-mcp figma-mcp-server +``` + +To use your local .env file instead of building it into the image: + +```bash +docker run -d -p 5002:5002 --env-file mcp_servers/figma/.env --name figma-mcp figma-mcp-server +``` + +### Option 2: Python Virtual Environment + +```bash +# Navigate to the Figma server directory +cd mcp_servers/figma + +# Create and activate virtual environment +python -m venv venv +source venv/bin/activate # On Windows: venv\Scripts\activate + +# Install dependencies +pip install -r requirements.txt + +# Run the server +python server.py +``` + +Once running, the server will be accessible at `http://localhost:5002`. + +## šŸ”Œ API Usage + +The server implements the Model Context Protocol (MCP) standard. Here's an example of how to call a tool: + +```python +import httpx + +async def call_figma_tool(): + url = "http://localhost:5002/mcp" + payload = { + "tool_name": "figma_get_file", + "tool_args": { + "file_key": "ABC123DEF456", + "depth": 1 + } + } + + async with httpx.AsyncClient() as client: + response = await client.post(url, json=payload) + result = response.json() + return result +``` + +## šŸ“‹ Common Operations + +### Testing Connection + +```python +payload = { + "tool_name": "figma_test_connection", + "tool_args": {} +} +``` + +### Getting File Content + +```python +payload = { + "tool_name": "figma_get_file", + "tool_args": { + "file_key": "ABC123DEF456", # Extract from Figma URL + "depth": 2, + "geometry": "paths" + } +} +``` + +### Exporting Images + +```python +payload = { + "tool_name": "figma_get_file_images", + "tool_args": { + "file_key": "ABC123DEF456", + "ids": "123:456,789:012", # Node IDs from the file + "format": "png", + "scale": 2 + } +} +``` + +### Getting Team Projects (Requires Team ID) + +```python +payload = { + "tool_name": "figma_get_team_projects", + "tool_args": { + "team_id": "123456789" # From team URL + } +} +``` + +### Working with Comments + +```python +# Get comments +get_comments_payload = { + "tool_name": "figma_get_file_comments", + "tool_args": { + "file_key": "ABC123DEF456" + } +} + +# Add a comment +post_comment_payload = { + "tool_name": "figma_post_file_comment", + "tool_args": { + "file_key": "ABC123DEF456", + "message": "This needs to be updated", + "client_meta": { + "x": 100, + "y": 200, + "node_id": "123:456" + } + } +} +``` + +### Working with Variables (Design Tokens) + +```python +# Get published variables +get_vars_payload = { + "tool_name": "figma_get_published_variables", + "tool_args": { + "file_key": "ABC123DEF456" + } +} + +# Create/update variables +post_vars_payload = { + "tool_name": "figma_post_variables", + "tool_args": { + "file_key": "ABC123DEF456", + "variableCollections": [ + { + "action": "CREATE", + "name": "Colors", + "modes": [{"name": "Light"}, {"name": "Dark"}] + } + ], + "variables": [ + { + "action": "CREATE", + "name": "Primary Color", + "variableCollectionId": "collection_id", + "resolvedType": "COLOR" + } + ] + } +} +``` + +## šŸ› ļø Troubleshooting + +### Common Issues + +- **Authentication Failures**: + - Verify your Personal Access Token is correct and hasn't expired + - Ensure your token has the required scopes for the operations you're trying to perform + - Check that your token hasn't been revoked in Figma settings + +- **Missing Team/Project IDs**: + - Extract team IDs from Figma team URLs manually + - Use the `figma_get_team_projects` endpoint to discover project IDs + - File keys can be found in any Figma file URL + +- **File Access Errors**: + - Ensure you have proper permissions to access the file/team/project + - Some operations require edit permissions, not just view permissions + - Files in private teams may not be accessible with personal access tokens + +- **Rate Limiting**: + - Figma enforces rate limits based on your plan and authentication method + - Personal access tokens have per-user limits + - OAuth applications have global application limits + - Implement appropriate delays between requests if hitting limits + +- **Node ID Issues**: + - Node IDs change when files are modified + - Use the file structure from `figma_get_file` to find current node IDs + - Node IDs are strings like "123:456" not just numbers + +### Docker Build Issues + +- **File Not Found Errors**: If you see errors during Docker build, make sure you're building from the root project directory (`klavis/`), not from the server directory. + +### Finding Required IDs + +**To find File Keys:** +``` +Figma URL: https://www.figma.com/file/ABC123DEF456/My-Design-File +File Key: ABC123DEF456 +``` + +**To find Team IDs:** +``` +Team URL: https://www.figma.com/files/team/123456789/Team-Name +Team ID: 123456789 +``` + +**To find Node IDs:** +1. Use `figma_get_file` to get the file structure +2. Navigate through the document tree to find specific nodes +3. Each node has an `id` field that you can use for other operations + +## šŸ“Š API Limits and Best Practices + +### Rate Limits +- **Personal Access Tokens**: Per-user limits (varies by plan) +- **OAuth Applications**: Global application limits +- **Enterprise Plans**: Higher limits available + +### Best Practices +- **Cache File Data**: File content doesn't change frequently, cache when possible +- **Use Specific Node IDs**: Instead of getting entire files, request specific nodes when possible +- **Batch Operations**: Group related operations to minimize API calls +- **Error Handling**: Always implement proper error handling for API calls +- **Respect Rate Limits**: Monitor response headers for rate limit information + +## šŸ”„ Figma API Workflow Examples + +### Complete Design Handoff Workflow + +```python +# 1. Get file structure +file_structure = { + "tool_name": "figma_get_file", + "tool_args": { + "file_key": "ABC123DEF456", + "depth": 2 + } +} + +# 2. Export design assets +export_assets = { + "tool_name": "figma_get_file_images", + "tool_args": { + "file_key": "ABC123DEF456", + "ids": "123:456,789:012", + "format": "png", + "scale": 2 + } +} + +# 3. Get design tokens +design_tokens = { + "tool_name": "figma_get_published_variables", + "tool_args": { + "file_key": "ABC123DEF456" + } +} + +# 4. Add dev resources +dev_resources = { + "tool_name": "figma_post_dev_resources", + "tool_args": { + "file_key": "ABC123DEF456", + "dev_resources": [ + { + "name": "Component Documentation", + "url": "https://storybook.example.com/component", + "node_id": "123:456" + } + ] + } +} +``` + +### Library Management Workflow + +```python +# 1. Get library analytics +analytics = { + "tool_name": "figma_get_library_analytics", + "tool_args": { + "file_key": "LIBRARY_FILE_KEY" + } +} + +# 2. Update variables based on usage +update_vars = { + "tool_name": "figma_post_variables", + "tool_args": { + "file_key": "LIBRARY_FILE_KEY", + "variableCollections": [...], + "variables": [...] + } +} + +# 3. Set up webhook for library updates +webhook = { + "tool_name": "figma_post_webhook", + "tool_args": { + "team_id": "123456789", + "event_type": "LIBRARY_PUBLISH", + "endpoint": "https://your-app.com/figma-webhook", + "description": "Library update notifications" + } +} +``` + +## šŸ¤ Contributing + +Contributions are welcome! Please feel free to submit a Pull Request. + +1. Fork the repository +2. Create your feature branch (`git checkout -b feature/amazing-feature`) +3. Commit your changes (`git commit -m 'Add some amazing feature'`) +4. Push to the branch (`git push origin feature/amazing-feature`) +5. Open a Pull Request + +## šŸ“œ License + +This project is licensed under the MIT License - see the LICENSE file for details. + +## šŸ”— Related Links + +- [Figma REST API Documentation](https://www.figma.com/developers/api) +- [Figma OpenAPI Specification](https://github.com/figma/rest-api-spec) +- [Model Context Protocol (MCP) Specification](https://github.com/modelcontextprotocol/specification) +- [Figma Developer Portal](https://www.figma.com/developers/) +- [Figma API Quick Start Guide](https://help.figma.com/hc/en-us/articles/8085703771159-Manage-personal-access-tokens) + +## šŸŽÆ Use Cases + +This MCP server is perfect for: + +- **Design System Management**: Sync design tokens and components across tools +- **Automated Design Handoffs**: Export assets and specifications programmatically +- **Design Workflow Integration**: Connect Figma to project management and development tools +- **Library Analytics**: Track usage of design system components +- **Comment Management**: Automate design review processes +- **Webhook Integration**: React to file changes and library updates in real-time +- **Dev Resource Management**: Link design components to documentation and code + +## šŸ’” Tips for Success + +1. **Start Small**: Begin with simple file reading operations before moving to complex workflows +2. **Understand the Hierarchy**: Learn Figma's node structure (Document → Page → Frame → Layer) +3. **Use Figma URLs**: Extract file keys and team IDs directly from Figma URLs +4. **Monitor Rate Limits**: Keep track of your API usage to avoid hitting limits +5. **Cache Strategically**: Cache file data and node structures that don't change frequently +6. **Handle Errors Gracefully**: Figma API errors can be informative - read the error messages +7. **Test with Simple Files**: Use simple test files to understand the API before working with complex designs \ No newline at end of file diff --git a/mcp_servers/figma/requirements.txt b/mcp_servers/figma/requirements.txt new file mode 100644 index 00000000..e7aa1f25 --- /dev/null +++ b/mcp_servers/figma/requirements.txt @@ -0,0 +1,10 @@ +mcp>=1.12.0 +fastapi +uvicorn[standard] +click>=8.0.0 +pydantic>=2.5.0 +aiohttp>=3.8.0 +httpx>=0.27.0 +python-dotenv>=1.0.0 +typing-extensions +starlette>=0.27.0 \ No newline at end of file diff --git a/mcp_servers/figma/server.py b/mcp_servers/figma/server.py new file mode 100644 index 00000000..1bc561f1 --- /dev/null +++ b/mcp_servers/figma/server.py @@ -0,0 +1,580 @@ +import os +import json +import logging +import asyncio +from typing import Any, Dict + +import click +from dotenv import load_dotenv +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.stdio import stdio_server + +from tools import ( + figma_token_context, + get_current_user, + test_figma_connection, + get_file, + get_file_nodes, + get_file_images, + get_file_versions, + get_team_projects, + get_project_files, + get_file_comments, + post_file_comment, + delete_comment, + get_local_variables, + get_published_variables, + post_variables, + get_dev_resources, + post_dev_resources, + put_dev_resource, + delete_dev_resource, + get_team_webhooks, + post_webhook, + put_webhook, + delete_webhook, + get_library_analytics, +) + +# Load env early +load_dotenv() + +logger = logging.getLogger("figma-mcp-server") +logging.basicConfig(level=logging.INFO) + +FIGMA_API_KEY = os.getenv("FIGMA_API_KEY") or "" + +async def run_server(log_level: str = "INFO"): + """Run the Figma MCP server with stdio transport for Claude Desktop.""" + logging.getLogger().setLevel(getattr(logging, log_level.upper(), logging.INFO)) + + # Set the API key in context + if FIGMA_API_KEY: + figma_token_context.set(FIGMA_API_KEY) + logger.info("Figma API key configured") + else: + logger.warning("No Figma API key found in environment") + + app = Server("figma-mcp-server") + + # ----------------------------- Tool Registry -----------------------------# + @app.list_tools() + async def list_tools() -> list[types.Tool]: + """List all available Figma tools.""" + tools = [ + # Auth/Account tools + types.Tool( + name="figma_test_connection", + description="Test Figma Connection - verify API authentication is working correctly.", + inputSchema={"type": "object", "properties": {}} + ), + types.Tool( + name="figma_get_current_user", + description="Get Current User - retrieve information about the authenticated user.", + inputSchema={"type": "object", "properties": {}} + ), + + # File management tools + types.Tool( + name="figma_get_file", + description="Get File Content - retrieve the full content of a Figma file.", + inputSchema={ + "type": "object", + "required": ["file_key"], + "properties": { + "file_key": {"type": "string", "description": "The file key (from Figma URL)"}, + "version": {"type": "string", "description": "Specific version to retrieve"}, + "ids": {"type": "string", "description": "Comma-separated list of node IDs to filter"}, + "depth": {"type": "integer", "description": "How deep to traverse the document tree", "minimum": 1}, + "geometry": {"type": "string", "enum": ["paths", "no_geometry"], "description": "Whether to include geometry data"}, + "plugin_data": {"type": "string", "description": "Plugin data to include"}, + "branch_data": {"type": "boolean", "description": "Whether to include branch data"} + } + } + ), + types.Tool( + name="figma_get_file_nodes", + description="Get Specific File Nodes - retrieve specific nodes from a Figma file.", + inputSchema={ + "type": "object", + "required": ["file_key", "ids"], + "properties": { + "file_key": {"type": "string", "description": "The file key (from Figma URL)"}, + "ids": {"type": "string", "description": "Comma-separated list of node IDs"}, + "version": {"type": "string", "description": "Specific version to retrieve"}, + "depth": {"type": "integer", "description": "How deep to traverse the node tree", "minimum": 1}, + "geometry": {"type": "string", "enum": ["paths", "no_geometry"], "description": "Whether to include geometry data"}, + "plugin_data": {"type": "string", "description": "Plugin data to include"} + } + } + ), + types.Tool( + name="figma_get_file_images", + description="Get File Images - export images from specific nodes in a Figma file.", + inputSchema={ + "type": "object", + "required": ["file_key", "ids"], + "properties": { + "file_key": {"type": "string", "description": "The file key (from Figma URL)"}, + "ids": {"type": "string", "description": "Comma-separated list of node IDs to export"}, + "scale": {"type": "number", "description": "Scale factor for export", "minimum": 0.01, "maximum": 4}, + "format": {"type": "string", "enum": ["jpg", "png", "svg", "pdf"], "description": "Export format"}, + "svg_include_id": {"type": "boolean", "description": "Include node IDs in SVG"}, + "svg_simplify_stroke": {"type": "boolean", "description": "Simplify strokes in SVG"}, + "use_absolute_bounds": {"type": "boolean", "description": "Use absolute bounds for export"}, + "version": {"type": "string", "description": "Specific version to export"} + } + } + ), + types.Tool( + name="figma_get_file_versions", + description="Get File Versions - retrieve version history for a Figma file.", + inputSchema={ + "type": "object", + "required": ["file_key"], + "properties": { + "file_key": {"type": "string", "description": "The file key (from Figma URL)"} + } + } + ), + + # Project management tools + types.Tool( + name="figma_get_team_projects", + description="Get Team Projects - retrieve all projects for a team (requires team ID).", + inputSchema={ + "type": "object", + "required": ["team_id"], + "properties": { + "team_id": {"type": "string", "description": "The team ID (from Figma team URL)"} + } + } + ), + types.Tool( + name="figma_get_project_files", + description="Get Project Files - retrieve all files in a project (requires project ID).", + inputSchema={ + "type": "object", + "required": ["project_id"], + "properties": { + "project_id": {"type": "string", "description": "The project ID"}, + "branch_data": {"type": "boolean", "description": "Whether to include branch data"} + } + } + ), + + # Comment management tools + types.Tool( + name="figma_get_file_comments", + description="Get File Comments - retrieve comments from a Figma file.", + inputSchema={ + "type": "object", + "required": ["file_key"], + "properties": { + "file_key": {"type": "string", "description": "The file key (from Figma URL)"}, + "as_md": {"type": "boolean", "description": "Return comments as markdown"} + } + } + ), + types.Tool( + name="figma_post_file_comment", + description="Post File Comment - add a comment to a Figma file.", + inputSchema={ + "type": "object", + "required": ["file_key", "message", "client_meta"], + "properties": { + "file_key": {"type": "string", "description": "The file key (from Figma URL)"}, + "message": {"type": "string", "description": "The comment message"}, + "client_meta": {"type": "object", "description": "Client metadata including x, y coordinates and node_id"}, + "comment_id": {"type": "string", "description": "Parent comment ID for replies"} + } + } + ), + types.Tool( + name="figma_delete_comment", + description="Delete Comment - remove a comment from a Figma file.", + inputSchema={ + "type": "object", + "required": ["comment_id"], + "properties": { + "comment_id": {"type": "string", "description": "The comment ID to delete"} + } + } + ), + + # Variables (Design Tokens) management tools + types.Tool( + name="figma_get_local_variables", + description="Get Local Variables - retrieve local variables from a Figma file.", + inputSchema={ + "type": "object", + "required": ["file_key"], + "properties": { + "file_key": {"type": "string", "description": "The file key (from Figma URL)"} + } + } + ), + types.Tool( + name="figma_get_published_variables", + description="Get Published Variables - retrieve published variables from a Figma file.", + inputSchema={ + "type": "object", + "required": ["file_key"], + "properties": { + "file_key": {"type": "string", "description": "The file key (from Figma URL)"} + } + } + ), + types.Tool( + name="figma_post_variables", + description="Create/Update Variables - create or update variables in a Figma file.", + inputSchema={ + "type": "object", + "required": ["file_key", "variableCollections", "variables"], + "properties": { + "file_key": {"type": "string", "description": "The file key (from Figma URL)"}, + "variableCollections": {"type": "array", "description": "Array of variable collections to create/update"}, + "variables": {"type": "array", "description": "Array of variables to create/update"} + } + } + ), + + # Dev Resources management tools + types.Tool( + name="figma_get_dev_resources", + description="Get Dev Resources - retrieve dev resources from a Figma file.", + inputSchema={ + "type": "object", + "required": ["file_key"], + "properties": { + "file_key": {"type": "string", "description": "The file key (from Figma URL)"}, + "node_id": {"type": "string", "description": "Filter by specific node ID"} + } + } + ), + types.Tool( + name="figma_post_dev_resources", + description="Create Dev Resources - add dev resources to a Figma file.", + inputSchema={ + "type": "object", + "required": ["file_key", "dev_resources"], + "properties": { + "file_key": {"type": "string", "description": "The file key (from Figma URL)"}, + "dev_resources": {"type": "array", "description": "Array of dev resources to create"} + } + } + ), + types.Tool( + name="figma_put_dev_resource", + description="Update Dev Resource - modify an existing dev resource.", + inputSchema={ + "type": "object", + "required": ["dev_resource_id"], + "properties": { + "dev_resource_id": {"type": "string", "description": "The dev resource ID"}, + "name": {"type": "string", "description": "New name for the dev resource"}, + "url": {"type": "string", "description": "New URL for the dev resource"} + } + } + ), + types.Tool( + name="figma_delete_dev_resource", + description="Delete Dev Resource - remove a dev resource from Figma.", + inputSchema={ + "type": "object", + "required": ["dev_resource_id"], + "properties": { + "dev_resource_id": {"type": "string", "description": "The dev resource ID to delete"} + } + } + ), + + # Webhook management tools + types.Tool( + name="figma_get_team_webhooks", + description="Get Team Webhooks - retrieve webhooks for a team (requires team ID).", + inputSchema={ + "type": "object", + "required": ["team_id"], + "properties": { + "team_id": {"type": "string", "description": "The team ID (from Figma team URL)"} + } + } + ), + types.Tool( + name="figma_post_webhook", + description="Create Webhook - create a new webhook for team events.", + inputSchema={ + "type": "object", + "required": ["team_id", "event_type", "endpoint"], + "properties": { + "team_id": {"type": "string", "description": "The team ID (from Figma team URL)"}, + "event_type": {"type": "string", "enum": ["PING", "FILE_UPDATE", "FILE_DELETE", "FILE_VERSION_UPDATE", "LIBRARY_PUBLISH"], "description": "Type of event to listen for"}, + "endpoint": {"type": "string", "description": "HTTP endpoint to receive webhook notifications"}, + "passcode": {"type": "string", "description": "Passcode for webhook verification"}, + "description": {"type": "string", "description": "Description of the webhook"} + } + } + ), + types.Tool( + name="figma_put_webhook", + description="Update Webhook - modify an existing webhook.", + inputSchema={ + "type": "object", + "required": ["webhook_id"], + "properties": { + "webhook_id": {"type": "string", "description": "The webhook ID"}, + "event_type": {"type": "string", "enum": ["PING", "FILE_UPDATE", "FILE_DELETE", "FILE_VERSION_UPDATE", "LIBRARY_PUBLISH"], "description": "Type of event to listen for"}, + "endpoint": {"type": "string", "description": "HTTP endpoint to receive webhook notifications"}, + "passcode": {"type": "string", "description": "Passcode for webhook verification"}, + "description": {"type": "string", "description": "Description of the webhook"} + } + } + ), + types.Tool( + name="figma_delete_webhook", + description="Delete Webhook - remove a webhook.", + inputSchema={ + "type": "object", + "required": ["webhook_id"], + "properties": { + "webhook_id": {"type": "string", "description": "The webhook ID to delete"} + } + } + ), + + # Library Analytics tools + types.Tool( + name="figma_get_library_analytics", + description="Get Library Analytics - retrieve usage analytics for a published library.", + inputSchema={ + "type": "object", + "required": ["file_key"], + "properties": { + "file_key": {"type": "string", "description": "The file key of a published library (from Figma URL)"} + } + } + ), + ] + + logger.info(f"Returning {len(tools)} tools") + return tools + + # ---------------------------- Tool Dispatcher ----------------------------# + @app.call_tool() + async def call_tool(name: str, arguments: Dict[str, Any]) -> list[types.TextContent]: + logger.info(f"Calling tool: {name}") + + try: + # Auth/Account tools + if name == "figma_test_connection": + result = await test_figma_connection() + elif name == "figma_get_current_user": + result = await get_current_user() + + # File tools + elif name == "figma_get_file": + if not arguments.get("file_key"): + raise ValueError("Missing required argument: file_key") + result = await get_file( + file_key=arguments["file_key"], + version=arguments.get("version"), + ids=arguments.get("ids"), + depth=arguments.get("depth"), + geometry=arguments.get("geometry"), + plugin_data=arguments.get("plugin_data"), + branch_data=arguments.get("branch_data") + ) + elif name == "figma_get_file_nodes": + if not all([arguments.get("file_key"), arguments.get("ids")]): + raise ValueError("Missing required arguments: file_key and ids") + result = await get_file_nodes( + file_key=arguments["file_key"], + ids=arguments["ids"], + version=arguments.get("version"), + depth=arguments.get("depth"), + geometry=arguments.get("geometry"), + plugin_data=arguments.get("plugin_data") + ) + elif name == "figma_get_file_images": + if not all([arguments.get("file_key"), arguments.get("ids")]): + raise ValueError("Missing required arguments: file_key and ids") + result = await get_file_images( + file_key=arguments["file_key"], + ids=arguments["ids"], + scale=arguments.get("scale"), + format=arguments.get("format"), + svg_include_id=arguments.get("svg_include_id"), + svg_simplify_stroke=arguments.get("svg_simplify_stroke"), + use_absolute_bounds=arguments.get("use_absolute_bounds"), + version=arguments.get("version") + ) + elif name == "figma_get_file_versions": + if not arguments.get("file_key"): + raise ValueError("Missing required argument: file_key") + result = await get_file_versions(arguments["file_key"]) + + # Project tools + elif name == "figma_get_team_projects": + if not arguments.get("team_id"): + raise ValueError("Missing required argument: team_id") + result = await get_team_projects(arguments["team_id"]) + elif name == "figma_get_project_files": + if not arguments.get("project_id"): + raise ValueError("Missing required argument: project_id") + result = await get_project_files( + project_id=arguments["project_id"], + branch_data=arguments.get("branch_data") + ) + + # Comment tools + elif name == "figma_get_file_comments": + if not arguments.get("file_key"): + raise ValueError("Missing required argument: file_key") + result = await get_file_comments( + file_key=arguments["file_key"], + as_md=arguments.get("as_md") + ) + elif name == "figma_post_file_comment": + required_args = ["file_key", "message", "client_meta"] + for arg in required_args: + if arg not in arguments: + raise ValueError(f"Missing required argument: {arg}") + result = await post_file_comment( + file_key=arguments["file_key"], + message=arguments["message"], + client_meta=arguments["client_meta"], + comment_id=arguments.get("comment_id") + ) + elif name == "figma_delete_comment": + if not arguments.get("comment_id"): + raise ValueError("Missing required argument: comment_id") + result = await delete_comment(arguments["comment_id"]) + + # Variables tools + elif name == "figma_get_local_variables": + if not arguments.get("file_key"): + raise ValueError("Missing required argument: file_key") + result = await get_local_variables(arguments["file_key"]) + elif name == "figma_get_published_variables": + if not arguments.get("file_key"): + raise ValueError("Missing required argument: file_key") + result = await get_published_variables(arguments["file_key"]) + elif name == "figma_post_variables": + required_args = ["file_key", "variableCollections", "variables"] + for arg in required_args: + if arg not in arguments: + raise ValueError(f"Missing required argument: {arg}") + result = await post_variables( + file_key=arguments["file_key"], + variableCollections=arguments["variableCollections"], + variables=arguments["variables"] + ) + + # Dev Resources tools + elif name == "figma_get_dev_resources": + if not arguments.get("file_key"): + raise ValueError("Missing required argument: file_key") + result = await get_dev_resources( + file_key=arguments["file_key"], + node_id=arguments.get("node_id") + ) + elif name == "figma_post_dev_resources": + if not all([arguments.get("file_key"), arguments.get("dev_resources")]): + raise ValueError("Missing required arguments: file_key and dev_resources") + result = await post_dev_resources( + file_key=arguments["file_key"], + dev_resources=arguments["dev_resources"] + ) + elif name == "figma_put_dev_resource": + if not arguments.get("dev_resource_id"): + raise ValueError("Missing required argument: dev_resource_id") + result = await put_dev_resource( + dev_resource_id=arguments["dev_resource_id"], + name=arguments.get("name"), + url=arguments.get("url") + ) + elif name == "figma_delete_dev_resource": + if not arguments.get("dev_resource_id"): + raise ValueError("Missing required argument: dev_resource_id") + result = await delete_dev_resource(arguments["dev_resource_id"]) + + # Webhook tools + elif name == "figma_get_team_webhooks": + if not arguments.get("team_id"): + raise ValueError("Missing required argument: team_id") + result = await get_team_webhooks(arguments["team_id"]) + elif name == "figma_post_webhook": + required_args = ["team_id", "event_type", "endpoint"] + for arg in required_args: + if arg not in arguments: + raise ValueError(f"Missing required argument: {arg}") + result = await post_webhook( + team_id=arguments["team_id"], + event_type=arguments["event_type"], + endpoint=arguments["endpoint"], + passcode=arguments.get("passcode"), + description=arguments.get("description") + ) + elif name == "figma_put_webhook": + if not arguments.get("webhook_id"): + raise ValueError("Missing required argument: webhook_id") + result = await put_webhook( + webhook_id=arguments["webhook_id"], + event_type=arguments.get("event_type"), + endpoint=arguments.get("endpoint"), + passcode=arguments.get("passcode"), + description=arguments.get("description") + ) + elif name == "figma_delete_webhook": + if not arguments.get("webhook_id"): + raise ValueError("Missing required argument: webhook_id") + result = await delete_webhook(arguments["webhook_id"]) + + # Library Analytics tools + elif name == "figma_get_library_analytics": + if not arguments.get("file_key"): + raise ValueError("Missing required argument: file_key") + result = await get_library_analytics(arguments["file_key"]) + + else: + error_msg = f"Unknown tool: {name}" + logger.error(error_msg) + return [types.TextContent(type="text", text=json.dumps({"error": error_msg}))] + + logger.info(f"Tool {name} executed successfully") + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + error_response = { + "error": f"Tool execution failed: {str(e)}", + "tool": name, + "arguments": arguments + } + return [types.TextContent(type="text", text=json.dumps(error_response, indent=2))] + + # Run with stdio transport for Claude Desktop + logger.info("Starting Figma MCP server with stdio transport") + async with stdio_server() as (read_stream, write_stream): + await app.run(read_stream, write_stream, app.create_initialization_options()) + + +@click.command() +@click.option("--log-level", default="INFO", help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)") +def main(log_level: str) -> int: + """Figma MCP server with stdio transport for Claude Desktop.""" + try: + asyncio.run(run_server(log_level)) + return 0 + except KeyboardInterrupt: + logger.info("Server stopped by user") + return 0 + except Exception as e: + logger.error(f"Server error: {e}") + return 1 + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/figma/tools/__init__.py b/mcp_servers/figma/tools/__init__.py new file mode 100644 index 00000000..5c139063 --- /dev/null +++ b/mcp_servers/figma/tools/__init__.py @@ -0,0 +1,81 @@ +from .auth import get_current_user, test_figma_connection +from .files import ( + get_file, + get_file_nodes, + get_file_images, + get_file_versions +) +from .projects import ( + get_team_projects, + get_project_files +) +from .comments import ( + get_file_comments, + post_file_comment, + delete_comment +) +from .variables import ( + get_local_variables, + get_published_variables, + post_variables +) +from .dev_resources import ( + get_dev_resources, + post_dev_resources, + put_dev_resource, + delete_dev_resource +) +from .webhooks import ( + get_team_webhooks, + post_webhook, + put_webhook, + delete_webhook +) +from .library_analytics import ( + get_library_analytics +) +from .base import figma_token_context + +__all__ = [ + # Auth/Account + "get_current_user", + "test_figma_connection", + + # Files + "get_file", + "get_file_nodes", + "get_file_images", + "get_file_versions", + + # Projects + "get_team_projects", + "get_project_files", + + # Comments + "get_file_comments", + "post_file_comment", + "delete_comment", + + # Variables + "get_local_variables", + "get_published_variables", + "post_variables", + + # Dev Resources + "get_dev_resources", + "post_dev_resources", + "put_dev_resource", + "delete_dev_resource", + + # Webhooks + "get_team_webhooks", + "post_webhook", + "put_webhook", + "delete_webhook", + + # Library Analytics + "get_library_analytics", + + # Base + "figma_token_context", +] \ No newline at end of file diff --git a/mcp_servers/figma/tools/auth.py b/mcp_servers/figma/tools/auth.py new file mode 100644 index 00000000..b08ae15e --- /dev/null +++ b/mcp_servers/figma/tools/auth.py @@ -0,0 +1,69 @@ +import logging +from typing import Any, Dict +from .base import make_figma_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_current_user() -> Dict[str, Any]: + """Get information about the current user (me endpoint).""" + logger.info("Executing tool: get_current_user") + try: + endpoint = "/v1/me" + + user_data = await make_figma_request("GET", endpoint) + + result = { + "id": user_data.get("id"), + "email": user_data.get("email"), + "handle": user_data.get("handle"), + "img_url": user_data.get("img_url"), + "location": user_data.get("location"), + "company": user_data.get("company"), + "bio": user_data.get("bio"), + "website": user_data.get("website"), + "public_profile": user_data.get("public_profile"), + "created_at": user_data.get("created_at"), + "updated_at": user_data.get("updated_at") + } + + return result + except Exception as e: + logger.exception(f"Error executing tool get_current_user: {e}") + return { + "error": "Failed to retrieve current user information", + "exception": str(e) + } + +async def test_figma_connection() -> Dict[str, Any]: + """Test the connection to Figma API and verify authentication.""" + logger.info("Executing tool: test_figma_connection") + try: + # Use the /v1/me endpoint to test authentication + user_data = await get_current_user() + + if "error" in user_data: + return { + "status": "error", + "message": "Failed to connect to Figma API", + "error": user_data["error"] + } + + result = { + "status": "success", + "message": "Figma API connection successful", + "authenticated_user": { + "id": user_data.get("id"), + "handle": user_data.get("handle"), + "email": user_data.get("email") + } + } + + return result + except Exception as e: + logger.exception(f"Error executing tool test_figma_connection: {e}") + return { + "status": "error", + "message": "Failed to connect to Figma API", + "error": str(e) + } \ No newline at end of file diff --git a/mcp_servers/figma/tools/base.py b/mcp_servers/figma/tools/base.py new file mode 100644 index 00000000..6fca4de0 --- /dev/null +++ b/mcp_servers/figma/tools/base.py @@ -0,0 +1,105 @@ +import os +import logging +import ssl +from typing import Any, Dict, Optional +from contextvars import ContextVar +import aiohttp + +# Configure logging +logger = logging.getLogger(__name__) + +# Context variable to store the Figma API key for each request +figma_token_context: ContextVar[str] = ContextVar('figma_token') + +def get_figma_api_key() -> str: + """Get the Figma API key from context or environment.""" + try: + # Try to get from context first (for MCP server usage) + return figma_token_context.get() + except LookupError: + # Fall back to environment variable (for standalone usage) + api_key = os.getenv("FIGMA_API_KEY") + if not api_key: + raise RuntimeError("Figma API key not found in request context or environment") + return api_key + +def _get_figma_base_url() -> str: + """Get the base URL for Figma API.""" + return "https://api.figma.com/" + +def _get_figma_headers() -> Dict[str, str]: + """Create standard headers for Figma API calls.""" + api_key = get_figma_api_key() + return { + "Content-Type": "application/json", + "X-Figma-Token": api_key + } + +def _get_ssl_context(): + """Create secure SSL context.""" + return ssl.create_default_context() + +async def make_figma_request( + method: str, + endpoint: str, + json_data: Optional[Dict] = None, + params: Optional[Dict] = None, + expect_empty_response: bool = False +) -> Any: + """ + Makes an HTTP request to the Figma API. + + Args: + method: HTTP method (GET, POST, PUT, DELETE) + endpoint: API endpoint (should start with /) + json_data: JSON payload for POST/PUT requests + params: Query parameters for GET requests + expect_empty_response: Whether to expect an empty response (for some operations) + + Returns: + Response data as dict, or None for empty responses + """ + base_url = _get_figma_base_url() + url = f"{base_url}{endpoint}" + headers = _get_figma_headers() + + connector = aiohttp.TCPConnector(ssl=_get_ssl_context()) + async with aiohttp.ClientSession(headers=headers, connector=connector) as session: + try: + async with session.request(method, url, json=json_data, params=params) as response: + response.raise_for_status() + + if expect_empty_response: + if response.status in [200, 201, 204]: + return None + else: + logger.warning(f"Expected empty response for {method} {endpoint}, but got status {response.status}") + try: + return await response.json() + except aiohttp.ContentTypeError: + return await response.text() + else: + if 'application/json' in response.headers.get('Content-Type', ''): + return await response.json() + else: + text_content = await response.text() + logger.warning(f"Received non-JSON response for {method} {endpoint}: {text_content[:100]}...") + return {"raw_content": text_content} + + except aiohttp.ClientResponseError as e: + logger.error(f"Figma API request failed: {e.status} {e.message} for {method} {url}") + error_details = e.message + try: + error_body = await e.response.json() + if 'err' in error_body: + error_details = error_body['err'] + elif 'message' in error_body: + error_details = error_body['message'] + else: + error_details = f"{e.message} - {error_body}" + except Exception: + pass + raise RuntimeError(f"Figma API Error ({e.status}): {error_details}") from e + except Exception as e: + logger.error(f"An unexpected error occurred during Figma API request: {e}") + raise RuntimeError(f"Unexpected error during API call to {method} {url}") from e \ No newline at end of file diff --git a/mcp_servers/figma/tools/comments.py b/mcp_servers/figma/tools/comments.py new file mode 100644 index 00000000..cec3653a --- /dev/null +++ b/mcp_servers/figma/tools/comments.py @@ -0,0 +1,107 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import make_figma_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_file_comments(file_key: str, as_md: Optional[bool] = None) -> Dict[str, Any]: + """Get comments from a file.""" + logger.info(f"Executing tool: get_file_comments with file_key: {file_key}") + try: + endpoint = f"/v1/files/{file_key}/comments" + params = {} + + if as_md is not None: + params["as_md"] = as_md + + comments_data = await make_figma_request("GET", endpoint, params=params) + + result = { + "comments": [] + } + + for comment in comments_data.get("comments", []): + comment_info = { + "id": comment.get("id"), + "file_key": comment.get("file_key"), + "parent_id": comment.get("parent_id"), + "user": comment.get("user", {}), + "created_at": comment.get("created_at"), + "resolved_at": comment.get("resolved_at"), + "message": comment.get("message"), + "client_meta": comment.get("client_meta", {}), + "order_id": comment.get("order_id") + } + result["comments"].append(comment_info) + + return result + except Exception as e: + logger.exception(f"Error executing tool get_file_comments: {e}") + return { + "error": "Failed to retrieve file comments", + "file_key": file_key, + "exception": str(e) + } + +async def post_file_comment(file_key: str, message: str, client_meta: Dict[str, Any], + comment_id: Optional[str] = None) -> Dict[str, Any]: + """Post a comment to a file.""" + logger.info(f"Executing tool: post_file_comment with file_key: {file_key}") + try: + endpoint = f"/v1/files/{file_key}/comments" + + payload = { + "message": message, + "client_meta": client_meta + } + + if comment_id: + payload["comment_id"] = comment_id + + comment_data = await make_figma_request("POST", endpoint, json_data=payload) + + result = { + "id": comment_data.get("id"), + "file_key": comment_data.get("file_key"), + "parent_id": comment_data.get("parent_id"), + "user": comment_data.get("user", {}), + "created_at": comment_data.get("created_at"), + "resolved_at": comment_data.get("resolved_at"), + "message": comment_data.get("message"), + "client_meta": comment_data.get("client_meta", {}), + "order_id": comment_data.get("order_id") + } + + return result + except Exception as e: + logger.exception(f"Error executing tool post_file_comment: {e}") + return { + "error": "Failed to post comment", + "file_key": file_key, + "message": message, + "exception": str(e) + } + +async def delete_comment(comment_id: str) -> Dict[str, Any]: + """Delete a comment.""" + logger.info(f"Executing tool: delete_comment with comment_id: {comment_id}") + try: + endpoint = f"/v1/comments/{comment_id}" + + await make_figma_request("DELETE", endpoint, expect_empty_response=True) + + result = { + "status": "success", + "message": f"Comment {comment_id} has been deleted", + "comment_id": comment_id + } + + return result + except Exception as e: + logger.exception(f"Error executing tool delete_comment: {e}") + return { + "error": "Failed to delete comment", + "comment_id": comment_id, + "exception": str(e) + } \ No newline at end of file diff --git a/mcp_servers/figma/tools/dev_resources.py b/mcp_servers/figma/tools/dev_resources.py new file mode 100644 index 00000000..abd9d3f4 --- /dev/null +++ b/mcp_servers/figma/tools/dev_resources.py @@ -0,0 +1,143 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import make_figma_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_dev_resources(file_key: str, node_id: Optional[str] = None) -> Dict[str, Any]: + """Get dev resources from a file.""" + logger.info(f"Executing tool: get_dev_resources with file_key: {file_key}") + try: + endpoint = f"/v1/files/{file_key}/dev_resources" + params = {} + + if node_id: + params["node_id"] = node_id + + dev_resources_data = await make_figma_request("GET", endpoint, params=params) + + result = { + "dev_resources": [] + } + + for resource in dev_resources_data.get("dev_resources", []): + resource_info = { + "id": resource.get("id"), + "name": resource.get("name"), + "url": resource.get("url"), + "node_id": resource.get("node_id"), + "file_key": resource.get("file_key"), + "created_at": resource.get("created_at"), + "updated_at": resource.get("updated_at") + } + result["dev_resources"].append(resource_info) + + return result + except Exception as e: + logger.exception(f"Error executing tool get_dev_resources: {e}") + return { + "error": "Failed to retrieve dev resources", + "file_key": file_key, + "exception": str(e) + } + +async def post_dev_resources(file_key: str, dev_resources: List[Dict[str, Any]]) -> Dict[str, Any]: + """Create dev resources for a file.""" + logger.info(f"Executing tool: post_dev_resources with file_key: {file_key}") + try: + endpoint = f"/v1/files/{file_key}/dev_resources" + + payload = { + "dev_resources": dev_resources + } + + dev_resources_data = await make_figma_request("POST", endpoint, json_data=payload) + + result = { + "dev_resources": [] + } + + for resource in dev_resources_data.get("dev_resources", []): + resource_info = { + "id": resource.get("id"), + "name": resource.get("name"), + "url": resource.get("url"), + "node_id": resource.get("node_id"), + "file_key": resource.get("file_key"), + "created_at": resource.get("created_at"), + "updated_at": resource.get("updated_at") + } + result["dev_resources"].append(resource_info) + + return result + except Exception as e: + logger.exception(f"Error executing tool post_dev_resources: {e}") + return { + "error": "Failed to create dev resources", + "file_key": file_key, + "exception": str(e) + } + +async def put_dev_resource(dev_resource_id: str, name: Optional[str] = None, + url: Optional[str] = None) -> Dict[str, Any]: + """Update a dev resource.""" + logger.info(f"Executing tool: put_dev_resource with dev_resource_id: {dev_resource_id}") + try: + endpoint = f"/v1/dev_resources/{dev_resource_id}" + + payload = {} + if name: + payload["name"] = name + if url: + payload["url"] = url + + if not payload: + return { + "error": "No update parameters provided", + "dev_resource_id": dev_resource_id + } + + dev_resource_data = await make_figma_request("PUT", endpoint, json_data=payload) + + result = { + "id": dev_resource_data.get("id"), + "name": dev_resource_data.get("name"), + "url": dev_resource_data.get("url"), + "node_id": dev_resource_data.get("node_id"), + "file_key": dev_resource_data.get("file_key"), + "created_at": dev_resource_data.get("created_at"), + "updated_at": dev_resource_data.get("updated_at") + } + + return result + except Exception as e: + logger.exception(f"Error executing tool put_dev_resource: {e}") + return { + "error": "Failed to update dev resource", + "dev_resource_id": dev_resource_id, + "exception": str(e) + } + +async def delete_dev_resource(dev_resource_id: str) -> Dict[str, Any]: + """Delete a dev resource.""" + logger.info(f"Executing tool: delete_dev_resource with dev_resource_id: {dev_resource_id}") + try: + endpoint = f"/v1/dev_resources/{dev_resource_id}" + + await make_figma_request("DELETE", endpoint, expect_empty_response=True) + + result = { + "status": "success", + "message": f"Dev resource {dev_resource_id} has been deleted", + "dev_resource_id": dev_resource_id + } + + return result + except Exception as e: + logger.exception(f"Error executing tool delete_dev_resource: {e}") + return { + "error": "Failed to delete dev resource", + "dev_resource_id": dev_resource_id, + "exception": str(e) + } \ No newline at end of file diff --git a/mcp_servers/figma/tools/files.py b/mcp_servers/figma/tools/files.py new file mode 100644 index 00000000..4cee455b --- /dev/null +++ b/mcp_servers/figma/tools/files.py @@ -0,0 +1,169 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import make_figma_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_file(file_key: str, version: Optional[str] = None, ids: Optional[str] = None, + depth: Optional[int] = None, geometry: Optional[str] = None, + plugin_data: Optional[str] = None, branch_data: Optional[bool] = None) -> Dict[str, Any]: + """Get file content by file key.""" + logger.info(f"Executing tool: get_file with file_key: {file_key}") + try: + endpoint = f"/v1/files/{file_key}" + params = {} + + if version: + params["version"] = version + if ids: + params["ids"] = ids + if depth is not None: + params["depth"] = depth + if geometry: + params["geometry"] = geometry + if plugin_data: + params["plugin_data"] = plugin_data + if branch_data is not None: + params["branch_data"] = branch_data + + file_data = await make_figma_request("GET", endpoint, params=params) + + result = { + "name": file_data.get("name"), + "role": file_data.get("role"), + "lastModified": file_data.get("lastModified"), + "editorType": file_data.get("editorType"), + "thumbnailUrl": file_data.get("thumbnailUrl"), + "version": file_data.get("version"), + "document": file_data.get("document", {}), + "components": file_data.get("components", {}), + "componentSets": file_data.get("componentSets", {}), + "schemaVersion": file_data.get("schemaVersion"), + "styles": file_data.get("styles", {}), + "mainFileKey": file_data.get("mainFileKey"), + "branches": file_data.get("branches", []) + } + + return result + except Exception as e: + logger.exception(f"Error executing tool get_file: {e}") + return { + "error": "Failed to retrieve file", + "file_key": file_key, + "exception": str(e) + } + +async def get_file_nodes(file_key: str, ids: str, version: Optional[str] = None, + depth: Optional[int] = None, geometry: Optional[str] = None, + plugin_data: Optional[str] = None) -> Dict[str, Any]: + """Get specific nodes from a file.""" + logger.info(f"Executing tool: get_file_nodes with file_key: {file_key}, ids: {ids}") + try: + endpoint = f"/v1/files/{file_key}/nodes" + params = {"ids": ids} + + if version: + params["version"] = version + if depth is not None: + params["depth"] = depth + if geometry: + params["geometry"] = geometry + if plugin_data: + params["plugin_data"] = plugin_data + + nodes_data = await make_figma_request("GET", endpoint, params=params) + + result = { + "name": nodes_data.get("name"), + "role": nodes_data.get("role"), + "lastModified": nodes_data.get("lastModified"), + "editorType": nodes_data.get("editorType"), + "thumbnailUrl": nodes_data.get("thumbnailUrl"), + "version": nodes_data.get("version"), + "nodes": nodes_data.get("nodes", {}), + "err": nodes_data.get("err") + } + + return result + except Exception as e: + logger.exception(f"Error executing tool get_file_nodes: {e}") + return { + "error": "Failed to retrieve file nodes", + "file_key": file_key, + "node_ids": ids, + "exception": str(e) + } + +async def get_file_images(file_key: str, ids: str, scale: Optional[float] = None, + format: Optional[str] = None, svg_include_id: Optional[bool] = None, + svg_simplify_stroke: Optional[bool] = None, use_absolute_bounds: Optional[bool] = None, + version: Optional[str] = None) -> Dict[str, Any]: + """Get images from specific nodes in a file.""" + logger.info(f"Executing tool: get_file_images with file_key: {file_key}, ids: {ids}") + try: + endpoint = f"/v1/images/{file_key}" + params = {"ids": ids} + + if scale is not None: + params["scale"] = scale + if format: + params["format"] = format + if svg_include_id is not None: + params["svg_include_id"] = svg_include_id + if svg_simplify_stroke is not None: + params["svg_simplify_stroke"] = svg_simplify_stroke + if use_absolute_bounds is not None: + params["use_absolute_bounds"] = use_absolute_bounds + if version: + params["version"] = version + + images_data = await make_figma_request("GET", endpoint, params=params) + + result = { + "err": images_data.get("err"), + "images": images_data.get("images", {}), + "status": images_data.get("status") + } + + return result + except Exception as e: + logger.exception(f"Error executing tool get_file_images: {e}") + return { + "error": "Failed to retrieve file images", + "file_key": file_key, + "node_ids": ids, + "exception": str(e) + } + +async def get_file_versions(file_key: str) -> Dict[str, Any]: + """Get version history for a file.""" + logger.info(f"Executing tool: get_file_versions with file_key: {file_key}") + try: + endpoint = f"/v1/files/{file_key}/versions" + + versions_data = await make_figma_request("GET", endpoint) + + result = { + "versions": [] + } + + for version in versions_data.get("versions", []): + version_info = { + "id": version.get("id"), + "created_at": version.get("created_at"), + "label": version.get("label"), + "description": version.get("description"), + "user": version.get("user", {}), + "thumbnail_url": version.get("thumbnail_url") + } + result["versions"].append(version_info) + + return result + except Exception as e: + logger.exception(f"Error executing tool get_file_versions: {e}") + return { + "error": "Failed to retrieve file versions", + "file_key": file_key, + "exception": str(e) + } \ No newline at end of file diff --git a/mcp_servers/figma/tools/library_analytics.py b/mcp_servers/figma/tools/library_analytics.py new file mode 100644 index 00000000..f38b7d59 --- /dev/null +++ b/mcp_servers/figma/tools/library_analytics.py @@ -0,0 +1,68 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import make_figma_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_library_analytics(file_key: str) -> Dict[str, Any]: + """Get analytics data for a published library.""" + logger.info(f"Executing tool: get_library_analytics with file_key: {file_key}") + try: + endpoint = f"/v1/files/{file_key}/library_analytics" + + analytics_data = await make_figma_request("GET", endpoint) + + result = { + "meta": analytics_data.get("meta", {}), + "components": [], + "component_sets": [], + "styles": [] + } + + # Process components analytics + for component in analytics_data.get("components", []): + component_info = { + "node_id": component.get("node_id"), + "name": component.get("name"), + "created_at": component.get("created_at"), + "updated_at": component.get("updated_at"), + "usage_count": component.get("usage_count", 0), + "consuming_teams_count": component.get("consuming_teams_count", 0) + } + result["components"].append(component_info) + + # Process component sets analytics + for component_set in analytics_data.get("component_sets", []): + component_set_info = { + "node_id": component_set.get("node_id"), + "name": component_set.get("name"), + "created_at": component_set.get("created_at"), + "updated_at": component_set.get("updated_at"), + "usage_count": component_set.get("usage_count", 0), + "consuming_teams_count": component_set.get("consuming_teams_count", 0) + } + result["component_sets"].append(component_set_info) + + # Process styles analytics + for style in analytics_data.get("styles", []): + style_info = { + "node_id": style.get("node_id"), + "name": style.get("name"), + "style_type": style.get("style_type"), + "created_at": style.get("created_at"), + "updated_at": style.get("updated_at"), + "usage_count": style.get("usage_count", 0), + "consuming_teams_count": style.get("consuming_teams_count", 0) + } + result["styles"].append(style_info) + + return result + except Exception as e: + logger.exception(f"Error executing tool get_library_analytics: {e}") + return { + "error": "Failed to retrieve library analytics", + "file_key": file_key, + "note": "Make sure this is a published library file and you have access to analytics", + "exception": str(e) + } \ No newline at end of file diff --git a/mcp_servers/figma/tools/projects.py b/mcp_servers/figma/tools/projects.py new file mode 100644 index 00000000..0a873dc9 --- /dev/null +++ b/mcp_servers/figma/tools/projects.py @@ -0,0 +1,71 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import make_figma_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_team_projects(team_id: str) -> Dict[str, Any]: + """Get all projects for a team.""" + logger.info(f"Executing tool: get_team_projects with team_id: {team_id}") + try: + endpoint = f"/v1/teams/{team_id}/projects" + + projects_data = await make_figma_request("GET", endpoint) + + result = { + "projects": [] + } + + for project in projects_data.get("projects", []): + project_info = { + "id": project.get("id"), + "name": project.get("name") + } + result["projects"].append(project_info) + + return result + except Exception as e: + logger.exception(f"Error executing tool get_team_projects: {e}") + return { + "error": "Failed to retrieve team projects", + "team_id": team_id, + "note": "Make sure the team_id is correct and you have access to this team", + "exception": str(e) + } + +async def get_project_files(project_id: str, branch_data: Optional[bool] = None) -> Dict[str, Any]: + """Get all files in a project.""" + logger.info(f"Executing tool: get_project_files with project_id: {project_id}") + try: + endpoint = f"/v1/projects/{project_id}/files" + params = {} + + if branch_data is not None: + params["branch_data"] = branch_data + + files_data = await make_figma_request("GET", endpoint, params=params) + + result = { + "files": [] + } + + for file in files_data.get("files", []): + file_info = { + "key": file.get("key"), + "name": file.get("name"), + "thumbnail_url": file.get("thumbnail_url"), + "last_modified": file.get("last_modified"), + "branches": file.get("branches", []) + } + result["files"].append(file_info) + + return result + except Exception as e: + logger.exception(f"Error executing tool get_project_files: {e}") + return { + "error": "Failed to retrieve project files", + "project_id": project_id, + "note": "Make sure the project_id is correct and you have access to this project", + "exception": str(e) + } \ No newline at end of file diff --git a/mcp_servers/figma/tools/variables.py b/mcp_servers/figma/tools/variables.py new file mode 100644 index 00000000..defcf2f4 --- /dev/null +++ b/mcp_servers/figma/tools/variables.py @@ -0,0 +1,87 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import make_figma_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_local_variables(file_key: str) -> Dict[str, Any]: + """Get local variables from a file.""" + logger.info(f"Executing tool: get_local_variables with file_key: {file_key}") + try: + endpoint = f"/v1/files/{file_key}/variables/local" + + variables_data = await make_figma_request("GET", endpoint) + + result = { + "status": variables_data.get("status"), + "error": variables_data.get("error"), + "meta": variables_data.get("meta", {}), + "variables": variables_data.get("variables", {}), + "variableCollections": variables_data.get("variableCollections", {}) + } + + return result + except Exception as e: + logger.exception(f"Error executing tool get_local_variables: {e}") + return { + "error": "Failed to retrieve local variables", + "file_key": file_key, + "exception": str(e) + } + +async def get_published_variables(file_key: str) -> Dict[str, Any]: + """Get published variables from a file.""" + logger.info(f"Executing tool: get_published_variables with file_key: {file_key}") + try: + endpoint = f"/v1/files/{file_key}/variables/published" + + variables_data = await make_figma_request("GET", endpoint) + + result = { + "status": variables_data.get("status"), + "error": variables_data.get("error"), + "meta": variables_data.get("meta", {}), + "variables": variables_data.get("variables", {}), + "variableCollections": variables_data.get("variableCollections", {}) + } + + return result + except Exception as e: + logger.exception(f"Error executing tool get_published_variables: {e}") + return { + "error": "Failed to retrieve published variables", + "file_key": file_key, + "exception": str(e) + } + +async def post_variables(file_key: str, variableCollections: List[Dict[str, Any]], + variables: List[Dict[str, Any]]) -> Dict[str, Any]: + """Create or update variables in a file.""" + logger.info(f"Executing tool: post_variables with file_key: {file_key}") + try: + endpoint = f"/v1/files/{file_key}/variables" + + payload = { + "variableCollections": variableCollections, + "variables": variables + } + + variables_data = await make_figma_request("POST", endpoint, json_data=payload) + + result = { + "status": variables_data.get("status"), + "error": variables_data.get("error"), + "meta": variables_data.get("meta", {}), + "variables": variables_data.get("variables", {}), + "variableCollections": variables_data.get("variableCollections", {}) + } + + return result + except Exception as e: + logger.exception(f"Error executing tool post_variables: {e}") + return { + "error": "Failed to create/update variables", + "file_key": file_key, + "exception": str(e) + } \ No newline at end of file diff --git a/mcp_servers/figma/tools/webhooks.py b/mcp_servers/figma/tools/webhooks.py new file mode 100644 index 00000000..f4d93035 --- /dev/null +++ b/mcp_servers/figma/tools/webhooks.py @@ -0,0 +1,154 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import make_figma_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_team_webhooks(team_id: str) -> Dict[str, Any]: + """Get webhooks for a team.""" + logger.info(f"Executing tool: get_team_webhooks with team_id: {team_id}") + try: + endpoint = f"/v1/teams/{team_id}/webhooks" + + webhooks_data = await make_figma_request("GET", endpoint) + + result = { + "webhooks": [] + } + + for webhook in webhooks_data.get("webhooks", []): + webhook_info = { + "id": webhook.get("id"), + "team_id": webhook.get("team_id"), + "event_type": webhook.get("event_type"), + "client_id": webhook.get("client_id"), + "endpoint": webhook.get("endpoint"), + "passcode": webhook.get("passcode"), + "status": webhook.get("status"), + "description": webhook.get("description"), + "protocol_version": webhook.get("protocol_version") + } + result["webhooks"].append(webhook_info) + + return result + except Exception as e: + logger.exception(f"Error executing tool get_team_webhooks: {e}") + return { + "error": "Failed to retrieve team webhooks", + "team_id": team_id, + "exception": str(e) + } + +async def post_webhook(team_id: str, event_type: str, endpoint: str, + passcode: Optional[str] = None, description: Optional[str] = None) -> Dict[str, Any]: + """Create a webhook.""" + logger.info(f"Executing tool: post_webhook for team_id: {team_id}, event_type: {event_type}") + try: + webhook_endpoint = "/v1/webhooks" + + payload = { + "team_id": team_id, + "event_type": event_type, + "endpoint": endpoint + } + + if passcode: + payload["passcode"] = passcode + if description: + payload["description"] = description + + webhook_data = await make_figma_request("POST", webhook_endpoint, json_data=payload) + + result = { + "id": webhook_data.get("id"), + "team_id": webhook_data.get("team_id"), + "event_type": webhook_data.get("event_type"), + "client_id": webhook_data.get("client_id"), + "endpoint": webhook_data.get("endpoint"), + "passcode": webhook_data.get("passcode"), + "status": webhook_data.get("status"), + "description": webhook_data.get("description"), + "protocol_version": webhook_data.get("protocol_version") + } + + return result + except Exception as e: + logger.exception(f"Error executing tool post_webhook: {e}") + return { + "error": "Failed to create webhook", + "team_id": team_id, + "event_type": event_type, + "endpoint": endpoint, + "exception": str(e) + } + +async def put_webhook(webhook_id: str, event_type: Optional[str] = None, + endpoint: Optional[str] = None, passcode: Optional[str] = None, + description: Optional[str] = None) -> Dict[str, Any]: + """Update a webhook.""" + logger.info(f"Executing tool: put_webhook with webhook_id: {webhook_id}") + try: + webhook_endpoint = f"/v1/webhooks/{webhook_id}" + + payload = {} + if event_type: + payload["event_type"] = event_type + if endpoint: + payload["endpoint"] = endpoint + if passcode: + payload["passcode"] = passcode + if description: + payload["description"] = description + + if not payload: + return { + "error": "No update parameters provided", + "webhook_id": webhook_id + } + + webhook_data = await make_figma_request("PUT", webhook_endpoint, json_data=payload) + + result = { + "id": webhook_data.get("id"), + "team_id": webhook_data.get("team_id"), + "event_type": webhook_data.get("event_type"), + "client_id": webhook_data.get("client_id"), + "endpoint": webhook_data.get("endpoint"), + "passcode": webhook_data.get("passcode"), + "status": webhook_data.get("status"), + "description": webhook_data.get("description"), + "protocol_version": webhook_data.get("protocol_version") + } + + return result + except Exception as e: + logger.exception(f"Error executing tool put_webhook: {e}") + return { + "error": "Failed to update webhook", + "webhook_id": webhook_id, + "exception": str(e) + } + +async def delete_webhook(webhook_id: str) -> Dict[str, Any]: + """Delete a webhook.""" + logger.info(f"Executing tool: delete_webhook with webhook_id: {webhook_id}") + try: + endpoint = f"/v1/webhooks/{webhook_id}" + + await make_figma_request("DELETE", endpoint, expect_empty_response=True) + + result = { + "status": "success", + "message": f"Webhook {webhook_id} has been deleted", + "webhook_id": webhook_id + } + + return result + except Exception as e: + logger.exception(f"Error executing tool delete_webhook: {e}") + return { + "error": "Failed to delete webhook", + "webhook_id": webhook_id, + "exception": str(e) + } \ No newline at end of file diff --git a/mcp_servers/firecrawl/.eslintrc.json b/mcp_servers/firecrawl/.eslintrc.json new file mode 100644 index 00000000..390256c5 --- /dev/null +++ b/mcp_servers/firecrawl/.eslintrc.json @@ -0,0 +1,13 @@ +{ + "root": false, + "extends": [ + "../.eslintrc.js" + ], + "parserOptions": { + "tsconfigRootDir": ".", + "project": "./tsconfig.json" + }, + "rules": { + // Package-specific rules can go here + } +} \ No newline at end of file diff --git a/mcp_servers/firecrawl/Dockerfile b/mcp_servers/firecrawl/Dockerfile index cdc25d53..534bad13 100644 --- a/mcp_servers/firecrawl/Dockerfile +++ b/mcp_servers/firecrawl/Dockerfile @@ -6,7 +6,6 @@ WORKDIR /app # Copy package.json and package-lock.json to install dependencies COPY mcp_servers/firecrawl/package.json mcp_servers/firecrawl/package-lock.json ./ -COPY mcp_servers/firecrawl/.env . # Install dependencies (ignoring scripts to prevent running the prepare script) RUN npm install --ignore-scripts @@ -27,7 +26,6 @@ WORKDIR /app COPY --from=builder /app/dist /app/dist COPY --from=builder /app/package.json /app/package.json COPY --from=builder /app/package-lock.json /app/package-lock.json -COPY --from=builder /app/.env /app/.env # Install only production dependencies RUN npm ci --omit=dev --ignore-scripts diff --git a/mcp_servers/firecrawl/README.md b/mcp_servers/firecrawl/README.md index 6b7cd769..4ad0744e 100644 --- a/mcp_servers/firecrawl/README.md +++ b/mcp_servers/firecrawl/README.md @@ -1,121 +1,73 @@ # Firecrawl MCP Server -This directory contains a Model Context Protocol (MCP) server for integrating [Firecrawl](https://firecrawl.dev/) capabilities into applications like Klavis, Cursor, Claude, and other LLM clients. It allows leveraging Firecrawl's powerful web scraping, crawling, and data extraction features through a standardized protocol. +A Model Context Protocol (MCP) server for Firecrawl integration. Web scraping and content extraction using Firecrawl's API for reliable data harvesting. -This server is based on the official [mendableai/firecrawl-mcp-server](https://github.com/mendableai/firecrawl-mcp-server) and is provided under the MIT license. +## šŸš€ Quick Start - Run in 30 Seconds -## Features +### 🌐 Using Hosted Service (Recommended for Production) -This server exposes the following Firecrawl functionalities as tools: +Get instant access to Firecrawl with our managed infrastructure - **no setup required**: -* `firecrawl_scrape`: Scrape content from a single URL with advanced options (formats, selectors, main content extraction, JS rendering, etc.). -* `firecrawl_map`: Discover URLs starting from a given URL using sitemaps and link crawling. -* `firecrawl_crawl`: Perform an asynchronous crawl starting from a URL, collecting data from multiple pages. -* `firecrawl_batch_scrape`: Scrape multiple URLs efficiently in parallel. -* `firecrawl_check_batch_status`: Check the status of an ongoing batch scrape or crawl operation. -* `firecrawl_search`: Perform a web search using Firecrawl's search capabilities and optionally scrape results. -* `firecrawl_extract`: Extract structured data from web pages using LLMs based on a provided schema and prompt. -* `firecrawl_generate_llmstxt`: Generate `llms.txt` and `llms-full.txt` files for a domain to guide LLM interactions. +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** -## Prerequisites +```bash +pip install klavis +# or +npm install klavis +``` -* **Node.js:** Version 18.0.0 or higher. -* **npm:** Node Package Manager (usually comes with Node.js). -* **Docker:** (Recommended) For containerized deployment. -* **Firecrawl API Key:** Obtainable from [Firecrawl.dev](https://firecrawl.dev/). +```python +from klavis import Klavis -## Environment Setup +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("FIRECRAWL", "user123") +``` -Before running the server, you need to configure your Firecrawl API credentials. +### 🐳 Using Docker (For Self-Hosting) -1. Copy the example environment file: - ```bash - cp mcp_servers/firecrawl/.env.example mcp_servers/firecrawl/.env - ``` -2. Edit `mcp_servers/firecrawl/.env` and add your Firecrawl API key: - ```dotenv - # Firecrawl API credentials - FIRECRAWL_API_KEY=your-actual-api-key-here +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/firecrawl-mcp-server:latest - # --- Optional: For self-hosted Firecrawl instances --- - # Uncomment and set if you are NOT using the cloud service - # FIRECRAWL_API_URL=https://your-self-hosted-firecrawl-url.com - # ------ - # --- Optional: Retry configuration --- - # You can adjust these values if needed - # FIRECRAWL_RETRY_MAX_ATTEMPTS=3 - # FIRECRAWL_RETRY_INITIAL_DELAY=1000 - # FIRECRAWL_RETRY_MAX_DELAY=10000 - # FIRECRAWL_RETRY_BACKOFF_FACTOR=2 - # ------ - ``` +# Run Firecrawl MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/firecrawl-mcp-server:latest +``` -* `FIRECRAWL_API_KEY` (Required): Your API key for the Firecrawl service. -* `FIRECRAWL_API_URL` (Optional): The URL of your self-hosted Firecrawl instance. Only needed if you are not using the default cloud service. -* Retry variables (Optional): Control how the server retries requests upon encountering rate limits or transient errors. +**API Key Setup:** Get your Firecrawl API key from the [Firecrawl Dashboard](https://firecrawl.dev/). -*(Note: When using Docker, the `.env` file is automatically copied into the image during the build process as specified in the `Dockerfile`.)* +## šŸ› ļø Available Tools -## Running Locally +- **Web Scraping**: Extract content from websites and web pages +- **Content Parsing**: Parse HTML and extract structured data +- **Bulk Crawling**: Crawl multiple pages and websites efficiently +- **Data Extraction**: Extract specific data points and metadata +- **Format Conversion**: Convert web content to various formats -There are two primary ways to run the server locally: +## šŸ“š Documentation & Support -### 1. Using Docker (Recommended) +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | -This method packages the server and its dependencies into a container. +## šŸ¤ Contributing -1. **Build the Docker Image:** - * Navigate to the root directory of the `klavis` project. - * Run the build command: - ```bash - # Replace 'firecrawl-mcp-server' with your desired tag - docker build -t firecrawl-mcp-server -f mcp_servers/firecrawl/Dockerfile . - ``` +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. -2. **Run the Docker Container:** - ```bash - # This runs the server on port 5000 - docker run -p 5000:5000 --env-file mcp_servers/firecrawl/.env firecrawl-mcp-server - ``` - * `-p 5000:5000`: Maps port 5000 on your host machine to port 5000 inside the container. - * `--env-file mcp_servers/firecrawl/.env`: Passes the environment variables from your `.env` file to the container. +## šŸ“œ License -The server will start, and you should see log output indicating it's running, typically listening on `http://localhost:5000`. +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. -### 2. Using Node.js / npm +--- -This method runs the server directly using your local Node.js environment. - -1. **Navigate to the Server Directory:** - ```bash - cd mcp_servers/firecrawl - ``` - -2. **Install Dependencies:** - ```bash - npm install - ``` - -3. **Build the Server Code:** - (This compiles the TypeScript code to JavaScript) - ```bash - npm run build - ``` - -4. **Start the Server:** - ```bash - npm start - ``` - -The server will start using the environment variables defined in `mcp_servers/firecrawl/.env` and listen on port 5000 (or the port specified by the `PORT` environment variable, if set). - -## Development - -* **Linting:** `npm run lint` (check code style), `npm run lint:fix` (automatically fix issues) -* **Formatting:** `npm run format` (using Prettier) -* **Testing:** `npm test` (runs Jest tests) - -## Contributing - -Contributions are welcome! Please follow standard GitHub practices (fork, branch, pull request). \ No newline at end of file +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/firecrawl/index.ts b/mcp_servers/firecrawl/index.ts index d7b28232..1685747c 100644 --- a/mcp_servers/firecrawl/index.ts +++ b/mcp_servers/firecrawl/index.ts @@ -1,6 +1,7 @@ -import express from 'express'; +import express, { Request, Response } from 'express'; import { Server } from '@modelcontextprotocol/sdk/server/index.js'; import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js'; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; import { Tool, CallToolRequestSchema, @@ -13,11 +14,26 @@ import FirecrawlApp, { type FirecrawlDocument, } from '@mendable/firecrawl-js'; import PQueue from 'p-queue'; +import { AsyncLocalStorage } from 'async_hooks'; import dotenv from 'dotenv'; dotenv.config(); +// Added: Create AsyncLocalStorage for request context +const asyncLocalStorage = new AsyncLocalStorage<{ + firecrawlClient: FirecrawlApp; +}>(); + +// Added: Getter function for the client +function getFirecrawlClient() { + const store = asyncLocalStorage.getStore(); + if (!store) { + throw new Error('Firecrawl client not found in AsyncLocalStorage'); + } + return store.firecrawlClient; +} + // Tool definitions const SCRAPE_TOOL: Tool = { name: 'firecrawl_scrape', @@ -174,6 +190,7 @@ const SCRAPE_TOOL: Tool = { }, required: ['url'], }, + annotations: { category: 'FIRECRAWL_SEARCH', readOnlyHint: true }, }; const MAP_TOOL: Tool = { @@ -210,6 +227,7 @@ const MAP_TOOL: Tool = { }, required: ['url'], }, + annotations: { category: 'FIRECRAWL_WEB_SEARCH', readOnlyHint: true }, }; const CRAWL_TOOL: Tool = { @@ -322,6 +340,7 @@ const CRAWL_TOOL: Tool = { }, required: ['url'], }, + annotations: { category: 'FIRECRAWL_WEB_SEARCH', readOnlyHint: true }, }; const BATCH_SCRAPE_TOOL: Tool = { @@ -373,6 +392,7 @@ const BATCH_SCRAPE_TOOL: Tool = { }, required: ['urls'], }, + annotations: { category: 'FIRECRAWL_WEB_SEARCH', readOnlyHint: true }, }; const CHECK_BATCH_STATUS_TOOL: Tool = { @@ -388,6 +408,7 @@ const CHECK_BATCH_STATUS_TOOL: Tool = { }, required: ['id'], }, + annotations: { category: 'FIRECRAWL_METADATA', readOnlyHint: true }, }; const CHECK_CRAWL_STATUS_TOOL: Tool = { @@ -403,6 +424,7 @@ const CHECK_CRAWL_STATUS_TOOL: Tool = { }, required: ['id'], }, + annotations: { category: 'FIRECRAWL_METADATA', readOnlyHint: true }, }; const SEARCH_TOOL: Tool = { @@ -477,6 +499,7 @@ const SEARCH_TOOL: Tool = { }, required: ['query'], }, + annotations: { category: 'FIRECRAWL_WEB_SEARCH', readOnlyHint: true }, }; const EXTRACT_TOOL: Tool = { @@ -519,6 +542,7 @@ const EXTRACT_TOOL: Tool = { }, required: ['urls'], }, + annotations: { category: 'FIRECRAWL_AI_SEARCH', readOnlyHint: true }, }; const GENERATE_LLMSTXT_TOOL: Tool = { @@ -543,6 +567,7 @@ const GENERATE_LLMSTXT_TOOL: Tool = { }, required: ['url'], }, + annotations: { category: 'FIRECRAWL_AI_SEARCH', readOnlyHint: true }, }; // Type definitions @@ -726,37 +751,8 @@ function isGenerateLLMsTextOptions( ); } -// Server implementation -const server = new Server( - { - name: 'firecrawl-mcp', - version: '1.7.0', - }, - { - capabilities: { - tools: {}, - logging: {}, - }, - } -); - // Get optional API URL const FIRECRAWL_API_URL = process.env.FIRECRAWL_API_URL; -const FIRECRAWL_API_KEY = process.env.FIRECRAWL_API_KEY; - -// Check if API key is required (only for cloud service) -if (!FIRECRAWL_API_URL && !FIRECRAWL_API_KEY) { - console.error( - 'Error: FIRECRAWL_API_KEY environment variable is required when using the cloud service' - ); - process.exit(1); -} - -// Initialize Firecrawl client with optional API URL -const client = new FirecrawlApp({ - apiKey: FIRECRAWL_API_KEY || '', - ...(FIRECRAWL_API_URL ? { apiUrl: FIRECRAWL_API_URL } : {}), -}); // Configuration for retries and monitoring const CONFIG = { @@ -811,7 +807,7 @@ function safeLog( ); } else { // For other transport types, use the normal logging mechanism - server.sendLoggingMessage({ level, data }); + // server.sendLoggingMessage({ level, data }); } } @@ -888,6 +884,7 @@ let operationCounter = 0; async function processBatchOperation( operation: QueuedBatchOperation ): Promise { + const client = getFirecrawlClient(); try { operation.status = 'processing'; let totalCreditsUsed = 0; @@ -927,298 +924,258 @@ async function processBatchOperation( } } -// Tool handlers -server.setRequestHandler(ListToolsRequestSchema, async () => ({ - tools: [ - SCRAPE_TOOL, - MAP_TOOL, - CRAWL_TOOL, - BATCH_SCRAPE_TOOL, - CHECK_BATCH_STATUS_TOOL, - CHECK_CRAWL_STATUS_TOOL, - SEARCH_TOOL, - EXTRACT_TOOL, - GENERATE_LLMSTXT_TOOL, - ], -})); - -server.setRequestHandler(CallToolRequestSchema, async (request) => { - const startTime = Date.now(); - try { - const { name, arguments: args } = request.params; - - // Log incoming request with timestamp - safeLog( - 'info', - `[${new Date().toISOString()}] Received request for tool: ${name}` - ); - - if (!args) { - throw new Error('No arguments provided'); +const getFirecrawlMcpServer = () => { + // Server implementation + const server = new Server( + { + name: 'firecrawl-mcp', + version: '1.7.0', + }, + { + capabilities: { + tools: {}, + logging: {}, + }, } + ); - switch (name) { - case 'firecrawl_scrape': { - if (!isScrapeOptions(args)) { - throw new Error('Invalid arguments for firecrawl_scrape'); - } - const { url, ...options } = args; - try { - const scrapeStartTime = Date.now(); - safeLog( - 'info', - `Starting scrape for URL: ${url} with options: ${JSON.stringify(options)}` - ); - - const response = await client.scrapeUrl(url, { - ...options, - // @ts-expect-error Extended API options including origin - origin: 'mcp-server', - }); - - // Log performance metrics - safeLog( - 'info', - `Scrape completed in ${Date.now() - scrapeStartTime}ms` - ); - - if ('success' in response && !response.success) { - throw new Error(response.error || 'Scraping failed'); - } - - // Format content based on requested formats - const contentParts = []; + // Tool handlers + server.setRequestHandler(ListToolsRequestSchema, async () => ({ + tools: [ + SCRAPE_TOOL, + MAP_TOOL, + CRAWL_TOOL, + BATCH_SCRAPE_TOOL, + CHECK_BATCH_STATUS_TOOL, + CHECK_CRAWL_STATUS_TOOL, + SEARCH_TOOL, + EXTRACT_TOOL, + GENERATE_LLMSTXT_TOOL, + ], + })); + + server.setRequestHandler(CallToolRequestSchema, async (request) => { + const startTime = Date.now(); + try { + const { name, arguments: args } = request.params; + const client = getFirecrawlClient(); + + // Log incoming request with timestamp + safeLog( + 'info', + `[${new Date().toISOString()}] Received request for tool: ${name}` + ); - if (options.formats?.includes('markdown') && response.markdown) { - contentParts.push(response.markdown); - } - if (options.formats?.includes('html') && response.html) { - contentParts.push(response.html); - } - if (options.formats?.includes('rawHtml') && response.rawHtml) { - contentParts.push(response.rawHtml); - } - if (options.formats?.includes('links') && response.links) { - contentParts.push(response.links.join('\n')); - } - if (options.formats?.includes('screenshot') && response.screenshot) { - contentParts.push(response.screenshot); - } - if (options.formats?.includes('extract') && response.extract) { - contentParts.push(JSON.stringify(response.extract, null, 2)); - } + if (!args) { + throw new Error('No arguments provided'); + } - // If options.formats is empty, default to markdown - if (!options.formats || options.formats.length === 0) { - options.formats = ['markdown']; + switch (name) { + case 'firecrawl_scrape': { + if (!isScrapeOptions(args)) { + throw new Error('Invalid arguments for firecrawl_scrape'); } + const { url, ...options } = args; + try { + const scrapeStartTime = Date.now(); + safeLog( + 'info', + `Starting scrape for URL: ${url} with options: ${JSON.stringify(options)}` + ); - // Add warning to response if present - if (response.warning) { - safeLog('warning', response.warning); - } + const response = await client.scrapeUrl(url, { + ...options, + // @ts-expect-error Extended API options including origin + origin: 'mcp-server', + }); - return { - content: [ - { - type: 'text', - text: trimResponseText( - contentParts.join('\n\n') || 'No content available' - ), - }, - ], - isError: false, - }; - } catch (error) { - const errorMessage = - error instanceof Error ? error.message : String(error); - return { - content: [{ type: 'text', text: trimResponseText(errorMessage) }], - isError: true, - }; - } - } + // Log performance metrics + safeLog( + 'info', + `Scrape completed in ${Date.now() - scrapeStartTime}ms` + ); - case 'firecrawl_map': { - if (!isMapOptions(args)) { - throw new Error('Invalid arguments for firecrawl_map'); - } - const { url, ...options } = args; - const response = await client.mapUrl(url, { - ...options, - // @ts-expect-error Extended API options including origin - origin: 'mcp-server', - }); - if ('error' in response) { - throw new Error(response.error); - } - if (!response.links) { - throw new Error('No links received from Firecrawl API'); - } - return { - content: [ - { type: 'text', text: trimResponseText(response.links.join('\n')) }, - ], - isError: false, - }; - } + if ('success' in response && !response.success) { + throw new Error(response.error || 'Scraping failed'); + } - case 'firecrawl_batch_scrape': { - if (!isBatchScrapeOptions(args)) { - throw new Error('Invalid arguments for firecrawl_batch_scrape'); - } + // Format content based on requested formats + const contentParts = []; - try { - const operationId = `batch_${++operationCounter}`; - const operation: QueuedBatchOperation = { - id: operationId, - urls: args.urls, - options: args.options, - status: 'pending', - progress: { - completed: 0, - total: args.urls.length, - }, - }; + if (options.formats?.includes('markdown') && response.markdown) { + contentParts.push(response.markdown); + } + if (options.formats?.includes('html') && response.html) { + contentParts.push(response.html); + } + if (options.formats?.includes('rawHtml') && response.rawHtml) { + contentParts.push(response.rawHtml); + } + if (options.formats?.includes('links') && response.links) { + contentParts.push(response.links.join('\n')); + } + if (options.formats?.includes('screenshot') && response.screenshot) { + contentParts.push(response.screenshot); + } + if (options.formats?.includes('extract') && response.extract) { + contentParts.push(JSON.stringify(response.extract, null, 2)); + } - batchOperations.set(operationId, operation); + // If options.formats is empty, default to markdown + if (!options.formats || options.formats.length === 0) { + options.formats = ['markdown']; + } - // Queue the operation - batchQueue.add(() => processBatchOperation(operation)); + // Add warning to response if present + if (response.warning) { + safeLog('warning', response.warning); + } - safeLog( - 'info', - `Queued batch operation ${operationId} with ${args.urls.length} URLs` - ); + return { + content: [ + { + type: 'text', + text: trimResponseText( + contentParts.join('\n\n') || 'No content available' + ), + }, + ], + isError: false, + }; + } catch (error) { + const errorMessage = + error instanceof Error ? error.message : String(error); + return { + content: [{ type: 'text', text: trimResponseText(errorMessage) }], + isError: true, + }; + } + } + case 'firecrawl_map': { + if (!isMapOptions(args)) { + throw new Error('Invalid arguments for firecrawl_map'); + } + const { url, ...options } = args; + const response = await client.mapUrl(url, { + ...options, + // @ts-expect-error Extended API options including origin + origin: 'mcp-server', + }); + if ('error' in response) { + throw new Error(response.error); + } + if (!response.links) { + throw new Error('No links received from Firecrawl API'); + } return { content: [ - { - type: 'text', - text: trimResponseText( - `Batch operation queued with ID: ${operationId}. Use firecrawl_check_batch_status to check progress.` - ), - }, + { type: 'text', text: trimResponseText(response.links.join('\n')) }, ], isError: false, }; - } catch (error) { - const errorMessage = - error instanceof Error - ? error.message - : `Batch operation failed: ${JSON.stringify(error)}`; - return { - content: [{ type: 'text', text: trimResponseText(errorMessage) }], - isError: true, - }; } - } - case 'firecrawl_check_batch_status': { - if (!isStatusCheckOptions(args)) { - throw new Error('Invalid arguments for firecrawl_check_batch_status'); - } + case 'firecrawl_batch_scrape': { + if (!isBatchScrapeOptions(args)) { + throw new Error('Invalid arguments for firecrawl_batch_scrape'); + } - const operation = batchOperations.get(args.id); - if (!operation) { - return { - content: [ - { - type: 'text', - text: trimResponseText( - `No batch operation found with ID: ${args.id}` - ), + try { + const operationId = `batch_${++operationCounter}`; + const operation: QueuedBatchOperation = { + id: operationId, + urls: args.urls, + options: args.options, + status: 'pending', + progress: { + completed: 0, + total: args.urls.length, }, - ], - isError: true, - }; - } + }; - const status = `Batch Status: -Status: ${operation.status} -Progress: ${operation.progress.completed}/${operation.progress.total} -${operation.error ? `Error: ${operation.error}` : ''} -${operation.result - ? `Results: ${JSON.stringify(operation.result, null, 2)}` - : '' - }`; - - return { - content: [{ type: 'text', text: trimResponseText(status) }], - isError: false, - }; - } + batchOperations.set(operationId, operation); - case 'firecrawl_crawl': { - if (!isCrawlOptions(args)) { - throw new Error('Invalid arguments for firecrawl_crawl'); - } - const { url, ...options } = args; - const response = await withRetry( - async () => - // @ts-expect-error Extended API options including origin - client.asyncCrawlUrl(url, { ...options, origin: 'mcp-server' }), - 'crawl operation' - ); + // Queue the operation + batchQueue.add(() => processBatchOperation(operation)); - if (!response.success) { - throw new Error(response.error); - } + safeLog( + 'info', + `Queued batch operation ${operationId} with ${args.urls.length} URLs` + ); - // Monitor credits for cloud API - if (!FIRECRAWL_API_URL && hasCredits(response)) { - await updateCreditUsage(response.creditsUsed); + return { + content: [ + { + type: 'text', + text: trimResponseText( + `Batch operation queued with ID: ${operationId}. Use firecrawl_check_batch_status to check progress.` + ), + }, + ], + isError: false, + }; + } catch (error) { + const errorMessage = + error instanceof Error + ? error.message + : `Batch operation failed: ${JSON.stringify(error)}`; + return { + content: [{ type: 'text', text: trimResponseText(errorMessage) }], + isError: true, + }; + } } - return { - content: [ - { - type: 'text', - text: trimResponseText( - `Started crawl for ${url} with job ID: ${response.id}` - ), - }, - ], - isError: false, - }; - } + case 'firecrawl_check_batch_status': { + if (!isStatusCheckOptions(args)) { + throw new Error('Invalid arguments for firecrawl_check_batch_status'); + } - case 'firecrawl_check_crawl_status': { - if (!isStatusCheckOptions(args)) { - throw new Error('Invalid arguments for firecrawl_check_crawl_status'); - } - const response = await client.checkCrawlStatus(args.id); - if (!response.success) { - throw new Error(response.error); - } - const status = `Crawl Status: -Status: ${response.status} -Progress: ${response.completed}/${response.total} -Credits Used: ${response.creditsUsed} -Expires At: ${response.expiresAt} -${response.data.length > 0 ? '\nResults:\n' + formatResults(response.data) : '' - }`; - return { - content: [{ type: 'text', text: trimResponseText(status) }], - isError: false, - }; - } + const operation = batchOperations.get(args.id); + if (!operation) { + return { + content: [ + { + type: 'text', + text: trimResponseText( + `No batch operation found with ID: ${args.id}` + ), + }, + ], + isError: true, + }; + } - case 'firecrawl_search': { - if (!isSearchOptions(args)) { - throw new Error('Invalid arguments for firecrawl_search'); + const status = `Batch Status: + Status: ${operation.status} + Progress: ${operation.progress.completed}/${operation.progress.total} + ${operation.error ? `Error: ${operation.error}` : ''} + ${operation.result + ? `Results: ${JSON.stringify(operation.result, null, 2)}` + : '' + }`; + + return { + content: [{ type: 'text', text: trimResponseText(status) }], + isError: false, + }; } - try { + + case 'firecrawl_crawl': { + if (!isCrawlOptions(args)) { + throw new Error('Invalid arguments for firecrawl_crawl'); + } + const { url, ...options } = args; const response = await withRetry( async () => - client.search(args.query, { ...args, origin: 'mcp-server' }), - 'search operation' + // @ts-expect-error Extended API options including origin + client.asyncCrawlUrl(url, { ...options, origin: 'mcp-server' }), + 'crawl operation' ); if (!response.success) { - throw new Error( - `Search failed: ${response.error || 'Unknown error'}` - ); + throw new Error(response.error); } // Monitor credits for cloud API @@ -1226,220 +1183,279 @@ ${response.data.length > 0 ? '\nResults:\n' + formatResults(response.data) : '' await updateCreditUsage(response.creditsUsed); } - // Format the results - const results = response.data - .map( - (result) => - `URL: ${result.url} -Title: ${result.title || 'No title'} -Description: ${result.description || 'No description'} -${result.markdown ? `\nContent:\n${result.markdown}` : ''}` - ) - .join('\n\n'); - return { - content: [{ type: 'text', text: trimResponseText(results) }], + content: [ + { + type: 'text', + text: trimResponseText( + `Started crawl for ${url} with job ID: ${response.id}` + ), + }, + ], isError: false, }; - } catch (error) { - const errorMessage = - error instanceof Error - ? error.message - : `Search failed: ${JSON.stringify(error)}`; + } + + case 'firecrawl_check_crawl_status': { + if (!isStatusCheckOptions(args)) { + throw new Error('Invalid arguments for firecrawl_check_crawl_status'); + } + const response = await client.checkCrawlStatus(args.id); + if (!response.success) { + throw new Error(response.error); + } + const status = `Crawl Status: + Status: ${response.status} + Progress: ${response.completed}/${response.total} + Credits Used: ${response.creditsUsed} + Expires At: ${response.expiresAt} + ${response.data.length > 0 ? '\nResults:\n' + formatResults(response.data) : '' + }`; return { - content: [{ type: 'text', text: trimResponseText(errorMessage) }], - isError: true, + content: [{ type: 'text', text: trimResponseText(status) }], + isError: false, }; } - } - case 'firecrawl_extract': { - if (!isExtractOptions(args)) { - throw new Error('Invalid arguments for firecrawl_extract'); - } + case 'firecrawl_search': { + if (!isSearchOptions(args)) { + throw new Error('Invalid arguments for firecrawl_search'); + } + try { + const response = await withRetry( + async () => + client.search(args.query, { ...args, origin: 'mcp-server' }), + 'search operation' + ); - try { - const extractStartTime = Date.now(); + if (!response.success) { + throw new Error( + `Search failed: ${response.error || 'Unknown error'}` + ); + } - safeLog( - 'info', - `Starting extraction for URLs: ${args.urls.join(', ')}` - ); + // Monitor credits for cloud API + if (!FIRECRAWL_API_URL && hasCredits(response)) { + await updateCreditUsage(response.creditsUsed); + } - // Log if using self-hosted instance - if (FIRECRAWL_API_URL) { - safeLog('info', 'Using self-hosted instance for extraction'); - } + // Format the results + const results = response.data + .map( + (result) => + `URL: ${result.url} + Title: ${result.title || 'No title'} + Description: ${result.description || 'No description'} + ${result.markdown ? `\nContent:\n${result.markdown}` : ''}` + ) + .join('\n\n'); - const extractResponse = await withRetry( - async () => - client.extract(args.urls, { - prompt: args.prompt, - systemPrompt: args.systemPrompt, - schema: args.schema, - allowExternalLinks: args.allowExternalLinks, - enableWebSearch: args.enableWebSearch, - includeSubdomains: args.includeSubdomains, - origin: 'mcp-server', - } as ExtractParams), - 'extract operation' - ); + return { + content: [{ type: 'text', text: trimResponseText(results) }], + isError: false, + }; + } catch (error) { + const errorMessage = + error instanceof Error + ? error.message + : `Search failed: ${JSON.stringify(error)}`; + return { + content: [{ type: 'text', text: trimResponseText(errorMessage) }], + isError: true, + }; + } + } - // Type guard for successful response - if (!('success' in extractResponse) || !extractResponse.success) { - throw new Error(extractResponse.error || 'Extraction failed'); + case 'firecrawl_extract': { + if (!isExtractOptions(args)) { + throw new Error('Invalid arguments for firecrawl_extract'); } - const response = extractResponse as ExtractResponse; + try { + const extractStartTime = Date.now(); - // Monitor credits for cloud API - if (!FIRECRAWL_API_URL && hasCredits(response)) { - await updateCreditUsage(response.creditsUsed || 0); - } + safeLog( + 'info', + `Starting extraction for URLs: ${args.urls.join(', ')}` + ); - // Log performance metrics - safeLog( - 'info', - `Extraction completed in ${Date.now() - extractStartTime}ms` - ); + // Log if using self-hosted instance + if (FIRECRAWL_API_URL) { + safeLog('info', 'Using self-hosted instance for extraction'); + } - // Add warning to response if present - const result = { - content: [ - { - type: 'text', - text: trimResponseText(JSON.stringify(response.data, null, 2)), - }, - ], - isError: false, - }; + const extractResponse = await withRetry( + async () => + client.extract(args.urls, { + prompt: args.prompt, + systemPrompt: args.systemPrompt, + schema: args.schema, + allowExternalLinks: args.allowExternalLinks, + enableWebSearch: args.enableWebSearch, + includeSubdomains: args.includeSubdomains, + origin: 'mcp-server', + } as ExtractParams), + 'extract operation' + ); - if (response.warning) { - safeLog('warning', response.warning); - } + // Type guard for successful response + if (!('success' in extractResponse) || !extractResponse.success) { + throw new Error(extractResponse.error || 'Extraction failed'); + } - return result; - } catch (error) { - const errorMessage = - error instanceof Error ? error.message : String(error); + const response = extractResponse as ExtractResponse; - // Special handling for self-hosted instance errors - if ( - FIRECRAWL_API_URL && - errorMessage.toLowerCase().includes('not supported') - ) { + // Monitor credits for cloud API + if (!FIRECRAWL_API_URL && hasCredits(response)) { + await updateCreditUsage(response.creditsUsed || 0); + } + + // Log performance metrics safeLog( - 'error', - 'Extraction is not supported by this self-hosted instance' + 'info', + `Extraction completed in ${Date.now() - extractStartTime}ms` ); - return { + + // Add warning to response if present + const result = { content: [ { type: 'text', - text: trimResponseText( - 'Extraction is not supported by this self-hosted instance. Please ensure LLM support is configured.' - ), + text: trimResponseText(JSON.stringify(response.data, null, 2)), }, ], + isError: false, + }; + + if (response.warning) { + safeLog('warning', response.warning); + } + + return result; + } catch (error) { + const errorMessage = + error instanceof Error ? error.message : String(error); + + // Special handling for self-hosted instance errors + if ( + FIRECRAWL_API_URL && + errorMessage.toLowerCase().includes('not supported') + ) { + safeLog( + 'error', + 'Extraction is not supported by this self-hosted instance' + ); + return { + content: [ + { + type: 'text', + text: trimResponseText( + 'Extraction is not supported by this self-hosted instance. Please ensure LLM support is configured.' + ), + }, + ], + isError: true, + }; + } + + return { + content: [{ type: 'text', text: trimResponseText(errorMessage) }], isError: true, }; } - - return { - content: [{ type: 'text', text: trimResponseText(errorMessage) }], - isError: true, - }; } - } - case 'firecrawl_generate_llmstxt': { - if (!isGenerateLLMsTextOptions(args)) { - throw new Error('Invalid arguments for firecrawl_generate_llmstxt'); - } + case 'firecrawl_generate_llmstxt': { + if (!isGenerateLLMsTextOptions(args)) { + throw new Error('Invalid arguments for firecrawl_generate_llmstxt'); + } - try { - const { url, ...params } = args; - const generateStartTime = Date.now(); + try { + const { url, ...params } = args; + const generateStartTime = Date.now(); - safeLog('info', `Starting LLMs.txt generation for URL: ${url}`); + safeLog('info', `Starting LLMs.txt generation for URL: ${url}`); - // Start the generation process - const response = await withRetry( - async () => - // @ts-expect-error Extended API options including origin - client.generateLLMsText(url, { ...params, origin: 'mcp-server' }), - 'LLMs.txt generation' - ); + // Start the generation process + const response = await withRetry( + async () => + // @ts-expect-error Extended API options including origin + client.generateLLMsText(url, { ...params, origin: 'mcp-server' }), + 'LLMs.txt generation' + ); - if (!response.success) { - throw new Error(response.error || 'LLMs.txt generation failed'); - } + if (!response.success) { + throw new Error(response.error || 'LLMs.txt generation failed'); + } - // Log performance metrics - safeLog( - 'info', - `LLMs.txt generation completed in ${Date.now() - generateStartTime}ms` - ); + // Log performance metrics + safeLog( + 'info', + `LLMs.txt generation completed in ${Date.now() - generateStartTime}ms` + ); - // Format the response - let resultText = ''; + // Format the response + let resultText = ''; - if ('data' in response) { - resultText = `LLMs.txt content:\n\n${response.data.llmstxt}`; + if ('data' in response) { + resultText = `LLMs.txt content:\n\n${response.data.llmstxt}`; - if (args.showFullText && response.data.llmsfulltxt) { - resultText += `\n\nLLMs-full.txt content:\n\n${response.data.llmsfulltxt}`; + if (args.showFullText && response.data.llmsfulltxt) { + resultText += `\n\nLLMs-full.txt content:\n\n${response.data.llmsfulltxt}`; + } } + + return { + content: [{ type: 'text', text: trimResponseText(resultText) }], + isError: false, + }; + } catch (error) { + const errorMessage = + error instanceof Error ? error.message : String(error); + return { + content: [{ type: 'text', text: trimResponseText(errorMessage) }], + isError: true, + }; } + } + default: return { - content: [{ type: 'text', text: trimResponseText(resultText) }], - isError: false, - }; - } catch (error) { - const errorMessage = - error instanceof Error ? error.message : String(error); - return { - content: [{ type: 'text', text: trimResponseText(errorMessage) }], + content: [ + { type: 'text', text: trimResponseText(`Unknown tool: ${name}`) }, + ], isError: true, }; - } } - - default: - return { - content: [ - { type: 'text', text: trimResponseText(`Unknown tool: ${name}`) }, - ], - isError: true, - }; + } catch (error) { + // Log detailed error information + safeLog('error', { + message: `Request failed: ${error instanceof Error ? error.message : String(error) + }`, + tool: request.params.name, + arguments: request.params.arguments, + timestamp: new Date().toISOString(), + duration: Date.now() - startTime, + }); + return { + content: [ + { + type: 'text', + text: trimResponseText( + `Error: ${error instanceof Error ? error.message : String(error)}` + ), + }, + ], + isError: true, + }; + } finally { + // Log request completion with performance metrics + safeLog('info', `Request completed in ${Date.now() - startTime}ms`); } - } catch (error) { - // Log detailed error information - safeLog('error', { - message: `Request failed: ${error instanceof Error ? error.message : String(error) - }`, - tool: request.params.name, - arguments: request.params.arguments, - timestamp: new Date().toISOString(), - duration: Date.now() - startTime, - }); - return { - content: [ - { - type: 'text', - text: trimResponseText( - `Error: ${error instanceof Error ? error.message : String(error)}` - ), - }, - ], - isError: true, - }; - } finally { - // Log request completion with performance metrics - safeLog('info', `Request completed in ${Date.now() - startTime}ms`); - } -}); + }); + + return server; +} // Helper function to format results function formatResults(data: FirecrawlDocument[]): string { @@ -1464,29 +1480,152 @@ function trimResponseText(text: string): string { return text.trim(); } +function extractApiKey(req: Request): string { + let authData = process.env.API_KEY; + + if (authData) { + return authData; + } + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: Firecrawl API key is missing. Provide it via API_KEY env var or x-auth-data header with token field.'); + return ''; + } + + const authDataJson = JSON.parse(authData); + return authDataJson.token ?? authDataJson.api_key ?? ''; +} + const app = express(); + +//============================================================================= +// STREAMABLE HTTP TRANSPORT (PROTOCOL VERSION 2025-03-26) +//============================================================================= + +app.post('/mcp', async (req: Request, res: Response) => { + + // Added: Get API key from env or header + const apiKey = extractApiKey(req); + + // Added: Instantiate client within request context + const firecrawlClient = new FirecrawlApp({ + apiKey: apiKey || '', // Use empty string if only API URL is provided (self-hosted) + ...(FIRECRAWL_API_URL ? { apiUrl: FIRECRAWL_API_URL } : {}), + }); + + const server = getFirecrawlMcpServer(); + try { + const transport: StreamableHTTPServerTransport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + await server.connect(transport); + asyncLocalStorage.run({ firecrawlClient }, async () => { + await transport.handleRequest(req, res, req.body); + }); + res.on('close', () => { + console.log('Request closed'); + transport.close(); + server.close(); + }); + } catch (error) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', + }, + id: null, + }); + } + } +}); + +app.get('/mcp', async (req: Request, res: Response) => { + console.log('Received GET MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +app.delete('/mcp', async (req: Request, res: Response) => { + console.log('Received DELETE MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +//============================================================================= +// DEPRECATED HTTP+SSE TRANSPORT (PROTOCOL VERSION 2024-11-05) +//============================================================================= + // to support multiple simultaneous connections we have a lookup object from // sessionId to transport -const transports: { [sessionId: string]: SSEServerTransport } = {}; +const transports = new Map(); app.get("/sse", async (req, res) => { - const transport = new SSEServerTransport('/messages', res); - transports[transport.sessionId] = transport; - res.on("close", () => { - delete transports[transport.sessionId]; + const transport = new SSEServerTransport(`/messages`, res); + + // Set up cleanup when connection closes + res.on('close', async () => { + console.log(`SSE connection closed for transport: ${transport.sessionId}`); + try { + transports.delete(transport.sessionId); + } finally { + } }); + + transports.set(transport.sessionId, transport); + + const server = getFirecrawlMcpServer(); await server.connect(transport); + + console.log(`SSE connection established with transport: ${transport.sessionId}`); }); app.post("/messages", async (req, res) => { const sessionId = req.query.sessionId as string; - const transport = transports[sessionId]; + const transport = transports.get(sessionId); if (transport) { - await transport.handlePostMessage(req, res); + // Added: Get API key from env or header + const apiKey = extractApiKey(req); + + // Added: Instantiate client within request context + const firecrawlClient = new FirecrawlApp({ + apiKey: apiKey || '', // Use empty string if only API URL is provided (self-hosted) + ...(FIRECRAWL_API_URL ? { apiUrl: FIRECRAWL_API_URL } : {}), + }); + + // Added: Run handler within AsyncLocalStorage context + asyncLocalStorage.run({ firecrawlClient }, async () => { + await transport.handlePostMessage(req, res); + }); } else { - res.status(400).send('No transport found for sessionId'); + console.error(`Transport not found for session ID: ${sessionId}`); + res.status(404).send({ error: "Transport not found" }); } }); -app.listen(5000); \ No newline at end of file +app.listen(5000, () => { + console.log('server running on port 5000'); +}); \ No newline at end of file diff --git a/mcp_servers/firecrawl/package-lock.json b/mcp_servers/firecrawl/package-lock.json index 5ee21a16..8910a905 100644 --- a/mcp_servers/firecrawl/package-lock.json +++ b/mcp_servers/firecrawl/package-lock.json @@ -1,16 +1,16 @@ { - "name": "firecrawl-mcp", + "name": "@klavis-ai/mcp-server-firecrawl", "version": "1.7.2", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "firecrawl-mcp", + "name": "@klavis-ai/mcp-server-firecrawl", "version": "1.7.2", "license": "MIT", "dependencies": { "@mendable/firecrawl-js": "^1.19.0", - "@modelcontextprotocol/sdk": "^1.4.1", + "@modelcontextprotocol/sdk": "^1.12.1", "dotenv": "^16.4.7", "express": "^5.1.0", "p-queue": "^8.0.1", @@ -623,9 +623,9 @@ } }, "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", "dependencies": { @@ -673,9 +673,9 @@ } }, "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", "dependencies": { @@ -1196,18 +1196,19 @@ } }, "node_modules/@modelcontextprotocol/sdk": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.8.0.tgz", - "integrity": "sha512-e06W7SwrontJDHwCawNO5SGxG+nU9AAx+jpHHZqGl/WrDBdWOpvirC+s58VpJTB5QemI4jTRcjWT4Pt3Q1NPQQ==", + "version": "1.12.3", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.12.3.tgz", + "integrity": "sha512-DyVYSOafBvk3/j1Oka4z5BWT8o4AFmoNyZY9pALOm7Lh3GZglR71Co4r4dEUoqDWdDazIZQHBe7J2Nwkg6gHgQ==", "license": "MIT", "dependencies": { + "ajv": "^6.12.6", "content-type": "^1.0.5", "cors": "^2.8.5", - "cross-spawn": "^7.0.3", + "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", - "pkce-challenge": "^4.1.0", + "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" @@ -1745,7 +1746,6 @@ "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.1", @@ -1858,13 +1858,13 @@ "license": "MIT" }, "node_modules/axios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.8.4.tgz", - "integrity": "sha512-eBSYY4Y68NNlHbHBMdeDmKNtDgXWhQsJcGqzO3iLUM0GraQFSS9cVgPX5I9b3lbdFKyYoAEGAZF1DwhTaljNAw==", + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz", + "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==", "license": "MIT", "dependencies": { "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", + "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, @@ -2021,9 +2021,9 @@ } }, "node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -2783,9 +2783,9 @@ } }, "node_modules/eslint/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", "dependencies": { @@ -3031,7 +3031,6 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true, "license": "MIT" }, "node_modules/fast-glob": { @@ -3068,7 +3067,6 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true, "license": "MIT" }, "node_modules/fast-levenshtein": { @@ -3224,14 +3222,15 @@ } }, "node_modules/form-data": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz", - "integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" }, "engines": { @@ -3422,9 +3421,9 @@ } }, "node_modules/glob/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", @@ -3893,9 +3892,9 @@ } }, "node_modules/jake/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", "dependencies": { @@ -4565,7 +4564,6 @@ "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, "license": "MIT" }, "node_modules/json-stable-stringify-without-jsonify": { @@ -5159,9 +5157,9 @@ } }, "node_modules/pkce-challenge": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-4.1.0.tgz", - "integrity": "sha512-ZBmhE1C9LcPoH9XZSdwiPtbPHZROwAnMy+kIFQVrnMCxY4Cudlz3gBOpzilgc0jOgRaiT3sIWfpMomW2ar2orQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", "license": "MIT", "engines": { "node": ">=16.20.0" @@ -5327,7 +5325,6 @@ "version": "2.3.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "dev": true, "license": "MIT", "engines": { "node": ">=6" @@ -5977,9 +5974,9 @@ } }, "node_modules/test-exclude/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", "dependencies": { @@ -6248,7 +6245,6 @@ "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, "license": "BSD-2-Clause", "dependencies": { "punycode": "^2.1.0" diff --git a/mcp_servers/firecrawl/package.json b/mcp_servers/firecrawl/package.json index a2888942..6e1da3b8 100644 --- a/mcp_servers/firecrawl/package.json +++ b/mcp_servers/firecrawl/package.json @@ -1,5 +1,5 @@ { - "name": "firecrawl-mcp", + "name": "@klavis-ai/mcp-server-firecrawl", "version": "1.7.2", "description": "MCP server for Firecrawl web scraping integration. Supports both cloud and self-hosted instances. Features include web scraping, batch processing, structured data extraction, and LLM-powered content analysis.", "type": "module", @@ -25,7 +25,7 @@ "license": "MIT", "dependencies": { "@mendable/firecrawl-js": "^1.19.0", - "@modelcontextprotocol/sdk": "^1.4.1", + "@modelcontextprotocol/sdk": "^1.12.1", "dotenv": "^16.4.7", "p-queue": "^8.0.1", "shx": "^0.3.4", diff --git a/mcp_servers/firecrawl_deep_research/.eslintrc.json b/mcp_servers/firecrawl_deep_research/.eslintrc.json new file mode 100644 index 00000000..6a1b1376 --- /dev/null +++ b/mcp_servers/firecrawl_deep_research/.eslintrc.json @@ -0,0 +1,11 @@ +{ + "root": false, + "extends": [ + "../.eslintrc.js" + ], + "parserOptions": { + "tsconfigRootDir": ".", + "project": "./tsconfig.json" + }, + "rules": {} +} \ No newline at end of file diff --git a/mcp_servers/firecrawl_deep_research/Dockerfile b/mcp_servers/firecrawl_deep_research/Dockerfile index 0c75cae5..b6138015 100644 --- a/mcp_servers/firecrawl_deep_research/Dockerfile +++ b/mcp_servers/firecrawl_deep_research/Dockerfile @@ -6,7 +6,6 @@ WORKDIR /app # Copy package.json and package-lock.json to install dependencies COPY mcp_servers/firecrawl_deep_research/package.json mcp_servers/firecrawl_deep_research/package-lock.json ./ -COPY mcp_servers/firecrawl_deep_research/.env . # Install dependencies (ignoring scripts to prevent running the prepare script) RUN npm install --ignore-scripts @@ -27,7 +26,6 @@ WORKDIR /app COPY --from=builder /app/dist /app/dist COPY --from=builder /app/package.json /app/package.json COPY --from=builder /app/package-lock.json /app/package-lock.json -COPY --from=builder /app/.env /app/.env # Install only production dependencies RUN npm ci --omit=dev --ignore-scripts diff --git a/mcp_servers/firecrawl_deep_research/README.md b/mcp_servers/firecrawl_deep_research/README.md index fdf7e4c2..65d98bbd 100644 --- a/mcp_servers/firecrawl_deep_research/README.md +++ b/mcp_servers/firecrawl_deep_research/README.md @@ -1,125 +1,73 @@ # Firecrawl Deep Research MCP Server -[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) +A Model Context Protocol (MCP) server for Firecrawl Deep Research integration. Advanced web research and content analysis using Firecrawl's deep research capabilities. -This is a Model Context Protocol (MCP) server specifically designed to provide Firecrawl's powerful Deep Research capabilities to compatible Large Language Model (LLM) clients like Cursor, Claude, and others. It is a specialized version adapted from the official [mendableai/firecrawl-mcp-server](https://github.com/mendableai/firecrawl-mcp-server). +## šŸš€ Quick Start - Run in 30 Seconds -This server allows LLMs to perform in-depth web research on a given query by intelligently crawling, searching, and analyzing web content. +### 🌐 Using Hosted Service (Recommended for Production) -## Features +Get instant access to Firecrawl Deep Research with our managed infrastructure - **no setup required**: -* Provides the `firecrawl_deep_research` tool via MCP. -* Leverages Firecrawl's API for sophisticated web crawling and data extraction. -* Handles asynchronous research tasks. -* Configurable retry logic for API calls. -* Easy deployment via Docker or local Node.js setup. +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** -## Prerequisites +```bash +pip install klavis +# or +npm install klavis +``` -* **Node.js:** Version 18.0.0 or higher. -* **npm:** (Comes with Node.js) or an alternative package manager like pnpm. -* **Docker:** (Optional, but recommended for ease of deployment). -* **Firecrawl API Key:** You need an API key from [Firecrawl.dev](https://firecrawl.dev). +```python +from klavis import Klavis -## Environment Variables +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("FIRECRAWL_DEEP_RESEARCH", "user123") +``` -Before running the server, you need to configure the necessary environment variables. Create a `.env` file in the `mcp_servers/firecrawl_deep_research` directory by copying `.env.example`: +### 🐳 Using Docker (For Self-Hosting) ```bash -cp mcp_servers/firecrawl_deep_research/.env.example mcp_servers/firecrawl_deep_research/.env -``` +# Pull latest image +docker pull ghcr.io/klavis-ai/firecrawl-deep-research-mcp-server:latest + -Then, edit the `.env` file and add your credentials: - -* `FIRECRAWL_API_KEY` (Required): Your Firecrawl API key. -* `FIRECRAWL_API_URL` (Optional): If you are using a self-hosted Firecrawl instance, uncomment this line and provide the URL. Otherwise, leave it commented to use the default cloud service. -* `FIRECRAWL_RETRY_MAX_ATTEMPTS` (Optional, Default: 3): Number of retry attempts for rate-limited requests. -* `FIRECRAWL_RETRY_INITIAL_DELAY` (Optional, Default: 1000): Initial delay before first retry (in milliseconds). -* `FIRECRAWL_RETRY_MAX_DELAY` (Optional, Default: 10000): Maximum delay between retries (in milliseconds). -* `FIRECRAWL_RETRY_BACKOFF_FACTOR` (Optional, Default: 2): Multiplier for exponential backoff. - -## Running the Server - -There are two primary ways to run the server: - -### 1. Using Docker (Recommended) - -This method encapsulates the application and its dependencies in a container. - -1. **Create `.env` file:** Ensure you have created and configured the `.env` file inside the `mcp_servers/firecrawl_deep_research` directory as described above. The Docker build process will copy this file into the image. -2. **Build the Docker Image:** Navigate to the **root** directory of the `klavis` project (the parent directory of `mcp_servers`) and run the build command: - ```bash - docker build -t firecrawl-deep-research-mcp -f mcp_servers/firecrawl_deep_research/Dockerfile . - ``` - * `-t firecrawl-deep-research-mcp`: Tags the image with the name `firecrawl-deep-research-mcp`. - * `-f mcp_servers/firecrawl_deep_research/Dockerfile`: Specifies the path to the Dockerfile relative to the build context. - * `.`: Specifies the build context (the `klavis` root directory). - -3. **Run the Docker Container:** - ```bash - docker run -p 5000:5000 --name firecrawl-server firecrawl-deep-research-mcp - ``` - * `-p 5000:5000`: Maps port 5000 on your host machine to port 5000 inside the container (where the server listens). - * `--name firecrawl-server`: Assigns a name to the running container for easier management. - * `firecrawl-deep-research-mcp`: The name of the image to run. - -The server should now be running and accessible on `http://localhost:5000`. - -### 2. Using Node.js (Local Development) - -This method is suitable for development or environments where Docker is not available. - -1. **Navigate to the Server Directory:** - ```bash - cd mcp_servers/firecrawl_deep_research - ``` -2. **Create `.env` file:** Ensure you have created and configured the `.env` file in this directory as described in the "Environment Variables" section. -3. **Install Dependencies:** - ```bash - npm install - ``` -4. **Build the TypeScript Code:** - ```bash - npm run build - ``` -5. **Start the Server:** - ```bash - npm start - ``` - -The server should now be running and accessible on `http://localhost:5000`. - -## Usage - -Once the server is running (either via Docker or Node.js), LLM clients configured to use MCP can connect to it at `http://localhost:5000` (or the appropriate host/port if deployed elsewhere). - -The server exposes the `firecrawl_deep_research` tool. An LLM can invoke this tool with the following arguments: - -```json -{ - "name": "firecrawl_deep_research", - "arguments": { - "query": "Research question or topic", - "maxDepth": 3, // Optional: Max crawl/search depth (default: 3) - "timeLimit": 120, // Optional: Time limit in seconds (default: 120) - "maxUrls": 50 // Optional: Max URLs to analyze (default: 50) - } -} +# Run Firecrawl Deep Research MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/firecrawl-deep-research-mcp-server:latest ``` -The server will then initiate the deep research process using the Firecrawl API and return the final analysis generated by Firecrawl's LLM based on the gathered information. +**API Key Setup:** Get your Firecrawl API key from the [Firecrawl Dashboard](https://firecrawl.dev/). -## Contributing +## šŸ› ļø Available Tools -Contributions are welcome! Please follow standard GitHub practices (fork, branch, submit a pull request). Ensure code quality by running linting and formatting checks: +- **Deep Research**: Conduct comprehensive web research on topics +- **Content Analysis**: Analyze and extract insights from web content +- **Research Reports**: Generate structured research reports +- **Multi-Source**: Aggregate information from multiple web sources +- **Topic Exploration**: Explore topics with advanced research capabilities -```bash -# Run linter -npm run lint +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. -# Automatically fix linting issues -npm run lint:fix +--- -# Format code -npm run format -``` \ No newline at end of file +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/firecrawl_deep_research/index.ts b/mcp_servers/firecrawl_deep_research/index.ts index 1d8e46a5..60050cf2 100644 --- a/mcp_servers/firecrawl_deep_research/index.ts +++ b/mcp_servers/firecrawl_deep_research/index.ts @@ -1,16 +1,32 @@ -import express from 'express'; +import express, { Request, Response } from 'express'; import { Server } from '@modelcontextprotocol/sdk/server/index.js'; import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js'; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; import { Tool, CallToolRequestSchema, ListToolsRequestSchema, } from '@modelcontextprotocol/sdk/types.js'; import FirecrawlApp from '@mendable/firecrawl-js'; +import { AsyncLocalStorage } from 'async_hooks'; import dotenv from 'dotenv'; dotenv.config(); +// Added: Create AsyncLocalStorage for request context +const asyncLocalStorage = new AsyncLocalStorage<{ + firecrawlClient: FirecrawlApp; +}>(); + +// Added: Getter function for the client +function getFirecrawlClient() { + const store = asyncLocalStorage.getStore(); + if (!store) { + throw new Error('Firecrawl client not found in AsyncLocalStorage'); + } + return store.firecrawlClient; +} + // Tool definition for deep research const DEEP_RESEARCH_TOOL: Tool = { name: 'firecrawl_deep_research', @@ -38,39 +54,11 @@ const DEEP_RESEARCH_TOOL: Tool = { }, required: ['query'], }, + annotations: { category: 'FIRECRAWL_RESEARCH', readOnlyHint: true }, }; -// Server implementation -const server = new Server( - { - name: 'firecrawl-deep-research-mcp', - version: '1.0.0', - }, - { - capabilities: { - tools: {}, - logging: {}, - }, - } -); - // Get API config const FIRECRAWL_API_URL = process.env.FIRECRAWL_API_URL; -const FIRECRAWL_API_KEY = process.env.FIRECRAWL_API_KEY; - -// Check if API key is required (only for cloud service) -if (!FIRECRAWL_API_URL && !FIRECRAWL_API_KEY) { - console.error( - 'Error: FIRECRAWL_API_KEY environment variable is required when using the cloud service' - ); - process.exit(1); -} - -// Initialize Firecrawl client with optional API URL -const client = new FirecrawlApp({ - apiKey: FIRECRAWL_API_KEY || '', - ...(FIRECRAWL_API_URL ? { apiUrl: FIRECRAWL_API_URL } : {}), -}); // Configuration for retries const CONFIG = { @@ -119,7 +107,7 @@ function safeLog( ); } else { // For other transport types, use the normal logging mechanism - server.sendLoggingMessage({ level, data }); + // server.sendLoggingMessage({ level, data }); } } @@ -172,153 +160,279 @@ function trimResponseText(text: string): string { return text.trim(); } -// Tool handlers -server.setRequestHandler(ListToolsRequestSchema, async () => ({ - tools: [DEEP_RESEARCH_TOOL], -})); - -server.setRequestHandler(CallToolRequestSchema, async (request) => { - const startTime = Date.now(); - try { - const { name, arguments: args } = request.params; - - // Log incoming request with timestamp - safeLog( - 'info', - `[${new Date().toISOString()}] Received request for tool: ${name}` - ); - - if (!args) { - throw new Error('No arguments provided'); +const getFirecrawlDeepResearchMcpServer = () => { + // Server implementation + const server = new Server( + { + name: 'firecrawl-deep-research-mcp', + version: '1.0.0', + }, + { + capabilities: { + tools: {}, + logging: {}, + }, } + ); + // Tool handlers + server.setRequestHandler(ListToolsRequestSchema, async () => ({ + tools: [DEEP_RESEARCH_TOOL], + })); + + server.setRequestHandler(CallToolRequestSchema, async (request) => { + const startTime = Date.now(); + const client = getFirecrawlClient(); + try { + const { name, arguments: args } = request.params; + + // Log incoming request with timestamp + safeLog( + 'info', + `[${new Date().toISOString()}] Received request for tool: ${name}` + ); - if (name === 'firecrawl_deep_research') { - if (!args || typeof args !== 'object' || !('query' in args)) { - throw new Error('Invalid arguments for firecrawl_deep_research'); + if (!args) { + throw new Error('No arguments provided'); } - try { - const researchStartTime = Date.now(); - safeLog('info', `Starting deep research for query: ${args.query}`); + if (name === 'firecrawl_deep_research') { + if (!args || typeof args !== 'object' || !('query' in args)) { + throw new Error('Invalid arguments for firecrawl_deep_research'); + } - const response = await client.deepResearch( - args.query as string, - { - maxDepth: args.maxDepth as number, - timeLimit: args.timeLimit as number, - maxUrls: args.maxUrls as number, - }, - // Activity callback - (activity: any) => { - safeLog( - 'info', - `Research activity: ${activity.message} (Depth: ${activity.depth})` - ); - }, - // Source callback - (source: any) => { - safeLog( - 'info', - `Research source found: ${source.url}${source.title ? ` - ${source.title}` : ''}` - ); - } - ); + try { + const researchStartTime = Date.now(); + safeLog('info', `Starting deep research for query: ${args.query}`); - // Log performance metrics - safeLog( - 'info', - `Deep research completed in ${Date.now() - researchStartTime}ms` - ); + const response = await client.deepResearch( + args.query as string, + { + maxDepth: args.maxDepth as number, + timeLimit: args.timeLimit as number, + maxUrls: args.maxUrls as number, + }, + // Activity callback + (activity: any) => { + safeLog( + 'info', + `Research activity: ${activity.message} (Depth: ${activity.depth})` + ); + }, + // Source callback + (source: any) => { + safeLog( + 'info', + `Research source found: ${source.url}${source.title ? ` - ${source.title}` : ''}` + ); + } + ); + + // Log performance metrics + safeLog( + 'info', + `Deep research completed in ${Date.now() - researchStartTime}ms` + ); + + if (!response.success) { + throw new Error(response.error || 'Deep research failed'); + } - if (!response.success) { - throw new Error(response.error || 'Deep research failed'); - } + // Monitor credits for cloud API + if (!FIRECRAWL_API_URL && hasCredits(response)) { + await updateCreditUsage(response.creditsUsed); + } - // Monitor credits for cloud API - if (!FIRECRAWL_API_URL && hasCredits(response)) { - await updateCreditUsage(response.creditsUsed); + // Format the results + const formattedResponse = { + finalAnalysis: response.data.finalAnalysis, + activities: response.data.activities, + sources: response.data.sources, + }; + + return { + content: [ + { + type: 'text', + text: trimResponseText(formattedResponse.finalAnalysis), + }, + ], + isError: false, + }; + } catch (error) { + const errorMessage = + error instanceof Error ? error.message : String(error); + return { + content: [{ type: 'text', text: trimResponseText(errorMessage) }], + isError: true, + }; } - - // Format the results - const formattedResponse = { - finalAnalysis: response.data.finalAnalysis, - activities: response.data.activities, - sources: response.data.sources, - }; - + } else { return { content: [ - { - type: 'text', - text: trimResponseText(formattedResponse.finalAnalysis), - }, + { type: 'text', text: trimResponseText(`Unknown tool: ${name}`) }, ], - isError: false, - }; - } catch (error) { - const errorMessage = - error instanceof Error ? error.message : String(error); - return { - content: [{ type: 'text', text: trimResponseText(errorMessage) }], isError: true, }; } - } else { + } catch (error) { + // Log detailed error information + safeLog('error', { + message: `Request failed: ${error instanceof Error ? error.message : String(error)}`, + tool: request.params.name, + arguments: request.params.arguments, + timestamp: new Date().toISOString(), + duration: Date.now() - startTime, + }); return { content: [ - { type: 'text', text: trimResponseText(`Unknown tool: ${name}`) }, + { + type: 'text', + text: trimResponseText( + `Error: ${error instanceof Error ? error.message : String(error)}` + ), + }, ], isError: true, }; + } finally { + // Log request completion with performance metrics + safeLog('info', `Request completed in ${Date.now() - startTime}ms`); } - } catch (error) { - // Log detailed error information - safeLog('error', { - message: `Request failed: ${error instanceof Error ? error.message : String(error)}`, - tool: request.params.name, - arguments: request.params.arguments, - timestamp: new Date().toISOString(), - duration: Date.now() - startTime, + }); + + return server; +} + +function extractApiKey(req: Request): string { + let authData = process.env.API_KEY; + + if (authData) { + return authData; + } + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: Firecrawl API key is missing. Provide it via API_KEY env var or x-auth-data header with token field.'); + return ''; + } + + const authDataJson = JSON.parse(authData); + return authDataJson.token ?? authDataJson.api_key ?? ''; +} + +const app = express(); + + +//============================================================================= +// STREAMABLE HTTP TRANSPORT (PROTOCOL VERSION 2025-03-26) +//============================================================================= + +app.post('/mcp', async (req: Request, res: Response) => { + + // Added: Get API key from env or header + const apiKey = extractApiKey(req); + + // Added: Instantiate client within request context + const firecrawlClient = new FirecrawlApp({ + apiKey: apiKey || '', // Use empty string if only API URL is provided (self-hosted) + ...(FIRECRAWL_API_URL ? { apiUrl: FIRECRAWL_API_URL } : {}), + }); + + const server = getFirecrawlDeepResearchMcpServer(); + try { + const transport: StreamableHTTPServerTransport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, }); - return { - content: [ - { - type: 'text', - text: trimResponseText( - `Error: ${error instanceof Error ? error.message : String(error)}` - ), + await server.connect(transport); + asyncLocalStorage.run({ firecrawlClient }, async () => { + await transport.handleRequest(req, res, req.body); + }); + res.on('close', () => { + console.log('Request closed'); + transport.close(); + server.close(); + }); + } catch (error) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', }, - ], - isError: true, - }; - } finally { - // Log request completion with performance metrics - safeLog('info', `Request completed in ${Date.now() - startTime}ms`); + id: null, + }); + } } }); -const app = express(); +app.get('/mcp', async (req: Request, res: Response) => { + console.log('Received GET MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +app.delete('/mcp', async (req: Request, res: Response) => { + console.log('Received DELETE MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); -// to support multiple simultaneous connections we have a lookup object from -// sessionId to transport -const transports: { [sessionId: string]: SSEServerTransport } = {}; +//============================================================================= +// DEPRECATED HTTP+SSE TRANSPORT (PROTOCOL VERSION 2024-11-05) +//============================================================================= + +// Changed: Use Map for transports +const transports = new Map(); app.get("/sse", async (req, res) => { const transport = new SSEServerTransport('/messages', res); - transports[transport.sessionId] = transport; + transports.set(transport.sessionId, transport); res.on("close", () => { - delete transports[transport.sessionId]; + transports.delete(transport.sessionId); }); + const server = getFirecrawlDeepResearchMcpServer(); await server.connect(transport); + console.log(`SSE connection established with transport: ${transport.sessionId}`); }); app.post("/messages", async (req, res) => { const sessionId = req.query.sessionId as string; - const transport = transports[sessionId]; + const transport = transports.get(sessionId); if (transport) { - await transport.handlePostMessage(req, res); + const apiKey = extractApiKey(req); + + const firecrawlClient = new FirecrawlApp({ + apiKey: apiKey || '', + ...(FIRECRAWL_API_URL ? { apiUrl: FIRECRAWL_API_URL } : {}), + }); + + asyncLocalStorage.run({ firecrawlClient }, async () => { + await transport.handlePostMessage(req, res); + }); } else { - res.status(400).send('No transport found for sessionId'); + console.error(`Transport not found for session ID: ${sessionId}`); + res.status(404).send({ error: "Transport not found" }); } }); diff --git a/mcp_servers/firecrawl_deep_research/package-lock.json b/mcp_servers/firecrawl_deep_research/package-lock.json index 6aa96cf8..31d56f4c 100644 --- a/mcp_servers/firecrawl_deep_research/package-lock.json +++ b/mcp_servers/firecrawl_deep_research/package-lock.json @@ -1,16 +1,16 @@ { - "name": "firecrawl-deep-research-mcp", + "name": "@klavis-ai/mcp-server-firecrawl-deep-research", "version": "1.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "firecrawl-deep-research-mcp", + "name": "@klavis-ai/mcp-server-firecrawl-deep-research", "version": "1.0.0", "license": "MIT", "dependencies": { "@mendable/firecrawl-js": "^1.19.0", - "@modelcontextprotocol/sdk": "^1.4.1", + "@modelcontextprotocol/sdk": "^1.12.1", "dotenv": "^16.4.7", "express": "^5.1.0", "p-queue": "^8.0.1" @@ -1193,18 +1193,19 @@ } }, "node_modules/@modelcontextprotocol/sdk": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.8.0.tgz", - "integrity": "sha512-e06W7SwrontJDHwCawNO5SGxG+nU9AAx+jpHHZqGl/WrDBdWOpvirC+s58VpJTB5QemI4jTRcjWT4Pt3Q1NPQQ==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.13.1.tgz", + "integrity": "sha512-8q6+9aF0yA39/qWT/uaIj6zTpC+Qu07DnN/lb9mjoquCJsAh6l3HyYqc9O3t2j7GilseOQOQimLg7W3By6jqvg==", "license": "MIT", "dependencies": { + "ajv": "^6.12.6", "content-type": "^1.0.5", "cors": "^2.8.5", - "cross-spawn": "^7.0.3", + "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", - "pkce-challenge": "^4.1.0", + "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" @@ -1763,7 +1764,6 @@ "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.1", @@ -1876,13 +1876,13 @@ "license": "MIT" }, "node_modules/axios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.8.4.tgz", - "integrity": "sha512-eBSYY4Y68NNlHbHBMdeDmKNtDgXWhQsJcGqzO3iLUM0GraQFSS9cVgPX5I9b3lbdFKyYoAEGAZF1DwhTaljNAw==", + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz", + "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==", "license": "MIT", "dependencies": { "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", + "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, @@ -3072,7 +3072,6 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true, "license": "MIT" }, "node_modules/fast-glob": { @@ -3109,7 +3108,6 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true, "license": "MIT" }, "node_modules/fast-levenshtein": { @@ -3265,14 +3263,15 @@ } }, "node_modules/form-data": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz", - "integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" }, "engines": { @@ -4567,7 +4566,6 @@ "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, "license": "MIT" }, "node_modules/json-stable-stringify-without-jsonify": { @@ -5145,9 +5143,9 @@ } }, "node_modules/pkce-challenge": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-4.1.0.tgz", - "integrity": "sha512-ZBmhE1C9LcPoH9XZSdwiPtbPHZROwAnMy+kIFQVrnMCxY4Cudlz3gBOpzilgc0jOgRaiT3sIWfpMomW2ar2orQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", "license": "MIT", "engines": { "node": ">=16.20.0" @@ -5313,7 +5311,6 @@ "version": "2.3.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "dev": true, "license": "MIT", "engines": { "node": ">=6" @@ -6228,7 +6225,6 @@ "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, "license": "BSD-2-Clause", "dependencies": { "punycode": "^2.1.0" diff --git a/mcp_servers/firecrawl_deep_research/package.json b/mcp_servers/firecrawl_deep_research/package.json index eb0812df..5a9e826d 100644 --- a/mcp_servers/firecrawl_deep_research/package.json +++ b/mcp_servers/firecrawl_deep_research/package.json @@ -1,5 +1,5 @@ { - "name": "firecrawl-deep-research-mcp", + "name": "@klavis-ai/mcp-server-firecrawl-deep-research", "version": "1.0.0", "description": "MCP server for Firecrawl deep research integration. Specialized server for LLM-powered deep web research.", "type": "module", @@ -25,7 +25,7 @@ "license": "MIT", "dependencies": { "@mendable/firecrawl-js": "^1.19.0", - "@modelcontextprotocol/sdk": "^1.4.1", + "@modelcontextprotocol/sdk": "^1.12.1", "dotenv": "^16.4.7", "p-queue": "^8.0.1", "express": "^5.1.0" diff --git a/mcp_servers/freshdesk/.env.example b/mcp_servers/freshdesk/.env.example new file mode 100644 index 00000000..bd07bd4d --- /dev/null +++ b/mcp_servers/freshdesk/.env.example @@ -0,0 +1,5 @@ +# Port for the MCP server to listen on +FRESHDESK_MCP_SERVER_PORT=5000 +FRESHDESK_API_KEY=YOUR_FRESHDESK_API_KEY_HERE +FRESHDESK_DOMAIN=YOUR_FRESHDESK_DOMAIN_HERE + diff --git a/mcp_servers/freshdesk/Dockerfile b/mcp_servers/freshdesk/Dockerfile new file mode 100644 index 00000000..b9977b64 --- /dev/null +++ b/mcp_servers/freshdesk/Dockerfile @@ -0,0 +1,25 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/freshdesk/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the server code +COPY mcp_servers/freshdesk/server.py . +COPY mcp_servers/freshdesk/util.py . +COPY mcp_servers/freshdesk/tools/ ./tools/ + +COPY mcp_servers/freshdesk/.env.example .env + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/freshdesk/README.md b/mcp_servers/freshdesk/README.md new file mode 100644 index 00000000..674ba8ad --- /dev/null +++ b/mcp_servers/freshdesk/README.md @@ -0,0 +1,73 @@ +# Freshdesk MCP Server + +A Model Context Protocol (MCP) server for Freshdesk integration. Manage tickets, contacts, and customer support using Freshdesk's API. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Freshdesk with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("FRESHDESK", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/freshdesk-mcp-server:latest + + +# Run Freshdesk MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/freshdesk-mcp-server:latest +``` + +**API Key Setup:** Get your Freshdesk API key from your [Freshdesk admin settings](https://support.freshdesk.com/en/support/solutions/articles/215517). + +## šŸ› ļø Available Tools + +- **Ticket Management**: Create, read, update, and resolve support tickets +- **Contact Management**: Manage customer contacts and information +- **Agent Operations**: Handle agent assignments and ticket routing +- **Knowledge Base**: Access and manage knowledge base articles +- **Reporting**: Generate support metrics and analytics + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/freshdesk/requirements.txt b/mcp_servers/freshdesk/requirements.txt new file mode 100644 index 00000000..35694f04 --- /dev/null +++ b/mcp_servers/freshdesk/requirements.txt @@ -0,0 +1,6 @@ +mcp==1.11.0 +starlette>=0.40.0 +uvicorn>=0.32.1 +click>=8.1.7 +python-dotenv>=1.0.1 +requests>=2.32.3 \ No newline at end of file diff --git a/mcp_servers/freshdesk/server.py b/mcp_servers/freshdesk/server.py new file mode 100644 index 00000000..5ec56fc3 --- /dev/null +++ b/mcp_servers/freshdesk/server.py @@ -0,0 +1,2093 @@ +import contextlib +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar +import base64 + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + + +from tools import ( + + # Context variables + auth_token_context, + domain_context, + + # Ticket tools + create_ticket, + update_ticket, + delete_ticket, + get_ticket_by_id, + list_tickets, + add_note_to_ticket, + filter_tickets, + merge_tickets, + restore_ticket, + watch_ticket, + unwatch_ticket, + forward_ticket, + get_archived_ticket, + delete_archived_ticket, + delete_attachment, + create_ticket_with_attachments, + delete_multiple_tickets, + reply_to_a_ticket, + update_note, + delete_note, + + # Contact tools + + create_contact, + get_contact_by_id, + list_contacts, + update_contact, + delete_contact, + search_contacts_by_name, + filter_contacts, + make_contact_agent, + restore_contact, + send_contact_invite, + merge_contacts, + + # Company tools + create_company, + get_company_by_id, + list_companies, + update_company, + delete_company, + filter_companies, + search_companies_by_name, + + # Account tools + get_current_account, + + # Agent tools + list_agents, + get_agent_by_id, + get_current_agent, + create_agent, + update_agent, + delete_agent, + search_agents, + bulk_create_agents, + + # Thread tools + create_thread, + get_thread_by_id, + update_thread, + delete_thread, + create_thread_message, + get_thread_message_by_id, + update_thread_message, + delete_thread_message +) + +# Configure logging +logger = logging.getLogger(__name__) +load_dotenv() +FRESHDESK_MCP_SERVER_PORT = int(os.getenv("FRESHDESK_MCP_SERVER_PORT", "5000")) + +def extract_credentials(request_or_scope) -> Dict[str, str]: + """Extract API key and domain from headers or environment.""" + api_key = os.getenv("API_KEY") + domain = os.getenv("DOMAIN") + auth_data = None + + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + header_value = request_or_scope.headers.get(b'x-auth-data') + if header_value: + auth_data = base64.b64decode(header_value).decode('utf-8') + + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + header_value = headers.get(b'x-auth-data') + if header_value: + auth_data = base64.b64decode(header_value).decode('utf-8') + + # If no API key from environment, try to parse from auth_data + if not api_key and auth_data: + try: + # Parse the JSON auth data to extract token + auth_json = json.loads(auth_data) + api_key = auth_json.get('token') or auth_json.get('api_key') or '' + domain = auth_json.get('domain', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + api_key = "" + + return { + 'api_key': api_key or "", + 'domain': domain or "", + } + + +attachment_schema = { + "type": "array", + "items":{ + "type": "object" , + "properties": { + "type": { + "type": "string", + "enum": ["url", "file", "base64", "local"], + "default": "local", + "description": "Type of the attachment (e.g., 'file')." + }, + "content": { + "type": "string", + "description": "Base64 encoded content of the attachment or URL of the attachment." + }, + "name": { + "type": "string", + "description": "Name of the attachment." + }, + "media_type": { + "type": "string", + "description": "Media type of the attachment. Example: 'image/png' or 'application/pdf' or 'text/plain'" + }, + "encoding": { + "type": "string", + "default": "utf-8", + "description": "Encoding of the attachment content. Default is 'utf-8'." + } + } + }, + "description": "List of attachment objects with 'type' and 'content' fields." +} + +@click.command() +@click.option("--port", default=FRESHDESK_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option("--log-level", default="INFO", help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)") +@click.option("--json-response", is_flag=True, default=False, help="Enable JSON responses for StreamableHTTP instead of SSE streams") +def main(port: int, log_level: str, json_response: bool) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("freshdesk-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="freshdesk_create_ticket", + description="Create a new ticket in Freshdesk.", + inputSchema={ + "type": "object", + "properties": { + "subject": { + "type": "string", + "description": "The subject of the ticket (required)." + }, + "description": { + "type": "string", + "description": "The HTML content of the ticket (required)." + }, + "email": { + "type": "string", + "format": "email", + "description": "Email address of the requester (required)." + }, + "name": { + "type": "string", + "description": "Name of the requester." + }, + "priority": { + "type": "integer", + "enum": [1, 2, 3, 4], + "description": "Priority of the ticket (1=Low, 2=Medium, 3=High, 4=Urgent). Default is 2 (Medium)." + }, + "status": { + "type": "integer", + "enum": [2, 3, 4, 5], + "description": "Status of the ticket (2=Open, 3=Pending, 4=Resolved, 5=Closed). Default is 2 (Open)." + }, + "source": { + "type": "integer", + "enum": [1, 2, 3, 7, 9, 10], + "description": "Source of the ticket (1=Email, 2=Portal, 3=Phone, 7=Chat, 9=Feedback, 10=Outbound Email). Default is 2 (Portal)." + }, + "tags": { + "type": "array", + "items": {"type": "string"}, + "description": "List of tags to associate with the ticket." + }, + "custom_fields": { + "type": "object", + "description": "Key-value pairs of custom fields." + }, + "cc_emails": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "List of email addresses to CC." + }, + "due_by": { + "type": "string", + "description": "Due date for the ticket (format: YYYY-MM-DD)." + }, + "fr_due_by": { + "type": "string", + "description": "Due date for the ticket (format: YYYY-MM-DD)." + }, + "attachments": attachment_schema, + "responder_id": { + "type": "integer", + "description": "ID of the responder." + }, + "parent_id": { + "type": "integer", + "description": "ID of the parent ticket. If provided, the ticket will be created as a child of the parent ticket." + }, + "company_id": { + "type": "integer", + "description": "ID of the company to assign the ticket to." + }, + "product_id": { + "type": "integer", + "description": "ID of the product to assign the ticket to." + }, + "ticket_type": { + "type": "string", + "description": "Type of the ticket. Helps categorize the ticket according to the different kinds of issues" + }, + "group_id": { + "type": "integer", + "description": "ID of the group to assign the ticket to." + }, + + }, + "required": ["subject", "description", "email"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET"}) + ), + types.Tool( + name="freshdesk_get_ticket_by_id", + description="Retrieve a ticket by its ID.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "integer", + "description": "ID of the ticket to retrieve (required)." + }, + "include": { + "type": "string", + "description": "Optional query parameter to include additional data (e.g., 'conversations', 'requester', 'company', 'stats')" + } + }, + "required": ["ticket_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_update_ticket", + description="Update an existing ticket in Freshdesk.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "integer", + "description": "ID of the ticket to update (required)." + }, + "subject": { + "type": "string", + "description": "New subject for the ticket." + }, + "description": { + "type": "string", + "description": "New HTML content for the ticket." + }, + "priority": { + "type": "integer", + "enum": [1, 2, 3, 4], + "description": "New priority (1=Low, 2=Medium, 3=High, 4=Urgent)." + }, + "status": { + "type": "integer", + "enum": [2, 3, 4, 5], + "description": "New status (2=Open, 3=Pending, 4=Resolved, 5=Closed)." + }, + "tags": { + "type": "array", + "items": {"type": "string"}, + "description": "New list of tags (replaces existing tags)." + }, + "custom_fields": { + "type": "object", + "description": "Updated custom fields (merges with existing)." + } + }, + "required": ["ticket_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET"}) + ), + types.Tool( + name="freshdesk_delete_ticket", + description="Delete a ticket by its ID.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "integer", + "description": "ID of the ticket to delete (required)." + } + }, + "required": ["ticket_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET"}) + ), + types.Tool( + name="freshdesk_add_note_to_ticket", + description="Add a note to a ticket.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "integer", + "description": "ID of the ticket (required)." + }, + "body": { + "type": "string", + "description": "Content of the note (required)." + }, + "user_id": { + "type": "integer", + "description": "ID of the user adding the note (defaults to authenticated user)." + }, + "incoming": { + "type": "boolean", + "description": "Whether the note is incoming. Default is false." + }, + "notify_emails": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "List of email addresses to notify" + }, + "private": { + "type": "boolean", + "description": "Whether the note is private. Default is false." + }, + "attachments": attachment_schema + }, + "required": ["ticket_id", "body"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_NOTE"}) + ), + types.Tool( + name="freshdesk_reply_to_a_ticket", + description="Reply to a ticket.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "integer", + "description": "ID of the ticket (required)." + }, + "body": { + "type": "string", + "description": "Content of the reply (required)." + }, + "user_id": { + "type": "integer", + "description": "ID of the user replying (defaults to authenticated user)." + }, + "cc_emails": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "List of email addresses to CC" + }, + "bcc_emails": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "List of email addresses to BCC" + }, + "from_email": { + "type": "string", + "description": "Email address to use as the sender" + }, + "attachments": attachment_schema + }, + "required": ["ticket_id", "body"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET"}) + ), + types.Tool( + name="freshdesk_update_note", + description="Update a note or reply to a ticket.", + inputSchema={ + "type": "object", + "properties": { + "note_id": { + "type": "integer", + "description": "ID of the note (required)." + }, + "body": { + "type": "string", + "description": "Content of the note (required)." + }, + "attachments": attachment_schema + }, + "required": ["note_id", "body"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_NOTE"}) + ), + types.Tool( + name="freshdesk_delete_note", + description="Delete a note or reply to a ticket.", + inputSchema={ + "type": "object", + "properties": { + "note_id": { + "type": "integer", + "description": "ID of the note (required)." + } + }, + "required": ["note_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_NOTE"}) + ), + types.Tool( + name="freshdesk_forward_ticket", + description="Forward a ticket to additional email addresses.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "integer", + "description": "ID of the ticket to forward (required)." + }, + "to_emails": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "List of email addresses to forward to (required)." + }, + "cc_emails": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "List of CC email addresses." + }, + "bcc_emails": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "List of BCC email addresses." + }, + "body": { + "type": "string", + "description": "Custom message to include in the forward." + }, + "subject": { + "type": "string", + "description": "Custom subject for the forwarded email." + } + }, + "required": ["ticket_id", "to_emails"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET"}) + ), + types.Tool( + name="freshdesk_get_archived_ticket", + description="Retrieve an archived ticket by its ID.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "integer", + "description": "ID of the archived ticket to retrieve (required)." + } + }, + "required": ["ticket_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_delete_archived_ticket", + description="Permanently delete an archived ticket.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "integer", + "description": "ID of the archived ticket to delete (required)." + } + }, + "required": ["ticket_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET"}) + ), + types.Tool( + name="freshdesk_filter_tickets", + description="Use ticket fields to filter through tickets and get a list of tickets matching the specified ticket fields.", + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Query string to filter tickets (required). Format - (ticket_field:integer OR ticket_field:'string') AND ticket_field:boolean" + }, + "page": { + "type": "integer", + "description": "Page number (for pagination). Default is 1." + }, + "per_page": { + "type": "integer", + "description": "Number of results per page (max 30). Default is 30." + } + }, + "required": ["query"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET"}) + ), + types.Tool( + name="freshdesk_create_ticket_with_attachments", + description="Create a new ticket with attachments in Freshdesk.", + inputSchema={ + "type": "object", + "properties": { + "subject": { + "type": "string", + "description": "The subject of the ticket (required)." + }, + "description": { + "type": "string", + "description": "The HTML content of the ticket (required)." + }, + "email": { + "type": "string", + "format": "email", + "description": "Email address of the requester (required)." + }, + "name": { + "type": "string", + "description": "Name of the requester." + }, + "priority": { + "type": "integer", + "enum": [1, 2, 3, 4], + "description": "Priority of the ticket (1=Low, 2=Medium, 3=High, 4=Urgent). Default is 2 (Medium)." + }, + "status": { + "type": "integer", + "enum": [2, 3, 4, 5], + "description": "Status of the ticket (2=Open, 3=Pending, 4=Resolved, 5=Closed). Default is 2 (Open)." + }, + "source": { + "type": "integer", + "enum": [1, 2, 3, 7, 9, 10], + "description": "Source of the ticket (1=Email, 2=Portal, 3=Phone, 7=Chat, 9=Feedback, 10=Outbound Email). Default is 2 (Portal)." + }, + "tags": { + "type": "array", + "items": {"type": "string"}, + "description": "List of tags to associate with the ticket." + }, + "custom_fields": { + "type": "object", + "description": "Key-value pairs of custom fields." + }, + "cc_emails": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "List of email addresses to CC." + }, + "attachments": attachment_schema, + "due_by": { + "type": "string", + "description": "Due date for the ticket (ISO 8601 format)." + }, + "fr_due_by": { + "type": "string", + "description": "Due date for the ticket (ISO 8601 format)." + }, + "group_id": { + "type": "integer", + "description": "ID of the group." + }, + "responder_id": { + "type": "integer", + "description": "ID of the responder." + }, + "parent_id": { + "type": "integer", + "description": "ID of the parent ticket. If provided, the ticket will be created as a child of the parent ticket." + } + }, + "required": ["subject", "description", "email"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_ATTACHMENT"}) + ), + types.Tool( + name="freshdesk_delete_multiple_tickets", + description="Delete multiple tickets at once.", + inputSchema={ + "type": "object", + "properties": { + "ticket_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "List of ticket IDs to delete (required)." + } + }, + "required": ["ticket_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET"}) + ), + types.Tool( + name="freshdesk_delete_attachment", + description="Delete an attachment from a ticket.", + inputSchema={ + "type": "object", + "properties": { + "attachment_id": { + "type": "integer", + "description": "ID of the attachment to delete (required)." + } + }, + "required": ["attachment_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_ATTACHMENT"}) + ), + types.Tool( + name="freshdesk_list_tickets", + description="List tickets with optional filtering.", + inputSchema={ + "type": "object", + "properties": { + "status": { + "type": "integer", + "enum": [2, 3, 4, 5], + "description": "Filter by status (2=Open, 3=Pending, 4=Resolved, 5=Closed)." + }, + "priority": { + "type": "integer", + "enum": [1, 2, 3, 4], + "description": "Filter by priority (1=Low, 2=Medium, 3=High, 4=Urgent)." + }, + "requester_id": { + "type": "integer", + "description": "Filter by requester ID." + }, + "email": { + "type": "string", + "format": "email", + "description": "Filter by email address." + }, + "agent_id": { + "type": "integer", + "description": "Filter by agent ID (ID of the agent to whom the ticket has been assigned)." + }, + "company_id": { + "type": "integer", + "description": "Filter by company ID." + }, + "group_id": { + "type": "integer", + "description": "Filter by group ID." + }, + "ticket_type": { + "type": "string", + "description": "Filter by ticket type." + }, + "updated_since": { + "type": "string", + "format": "date-time", + "description": "Only return tickets updated since this date (ISO 8601 format)." + }, + "created_since": { + "type": "string", + "format": "date-time", + "description": "Only return tickets created since this date (ISO 8601 format)." + }, + "due_by": { + "type": "string", + "format": "date-time", + "description": "Only return tickets due by this date (ISO 8601 format)." + }, + "order_by": { + "type": "string", + "description": "Order by (created_at, updated_at, priority, status). Default is created_at." + }, + "order_type": { + "type": "string", + "enum": ["asc", "desc"], + "description": "Order type (asc or desc). Default is desc." + }, + "include": { + "type": "string", + "description": "Include additional data (stats, requester, description)." + }, + "page": { + "type": "integer", + "description": "Page number for pagination. Default is 1." + }, + "per_page": { + "type": "integer", + "description": "Number of results per page (max 100). Default is 30." + } + } + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_merge_tickets", + description="Merge two tickets.", + inputSchema={ + "type": "object", + "properties": { + "primary_ticket_id": { + "type": "integer", + "description": "ID of the ticket to be merged (will be closed) (required)." + }, + "ticket_ids": { + "type": "integer", + "description": "ID of the ticket to merge into (required)." + }, + "convert_recepients_to_cc": { + "type": "boolean", + "description": "Convert recipients to CC (optional). Default is False." + } + }, + "required": ["primary_ticket_id", "ticket_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET"}) + ), + types.Tool( + name="freshdesk_restore_ticket", + description="Restore a deleted ticket.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "integer", + "description": "ID of the ticket to restore (required)." + } + }, + "required": ["ticket_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET"}) + ), + types.Tool( + name="freshdesk_watch_ticket", + description="Watch a ticket for updates.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "integer", + "description": "ID of the ticket to watch (required)." + }, + "user_id": { + "type": "integer", + "description": "ID of the user to watch the ticket (defaults to authenticated user)." + } + }, + "required": ["ticket_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET"}) + ), + types.Tool( + name="freshdesk_unwatch_ticket", + description="Stop watching a ticket.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "integer", + "description": "ID of the ticket to unwatch (required)." + } + }, + "required": ["ticket_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_TICKET"}) + ), + types.Tool( + name="freshdesk_create_contact", + description="Create a new contact in Freshdesk.", + inputSchema={ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name of the contact" + }, + "email": { + "type": "string", + "format": "email", + "description": "Primary email address" + }, + "phone": { + "type": "string", + "description": "Telephone number" + }, + "company_id": { + "type": "integer", + "description": "ID of the company" + }, + "description": { + "type": "string", + "description": "Description of the contact" + } + }, + "required": ["name"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_CONTACT"}) + ), + types.Tool( + name="freshdesk_get_contact_by_id", + description="Retrieve a contact by ID.", + inputSchema={ + "type": "object", + "properties": { + "contact_id": {"type": "integer", "description": "ID of the contact to retrieve"} + }, + "required": ["contact_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_CONTACT", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_list_contacts", + description="List all contacts, optionally filtered by parameters.", + inputSchema={ + "type": "object", + "properties": { + "email": { + "type": "string", + "format": "email", + "description": "Filter by email" + }, + "phone": { + "type": "string", + "description": "Filter by phone number" + }, + "mobile": { + "type": "string", + "description": "Filter by mobile number" + }, + "company_id": { + "type": "integer", + "description": "Filter by company ID" + }, + "state": { + "type": "string", + "description": "Filter by state (verified, unverified, blocked, deleted)" + }, + "updated_since": { + "type": "string", + "description": "Filter by last updated date (ISO 8601 format)" + }, + "page": { + "type": "integer", + "description": "Page number for pagination. Default is 1." + }, + "per_page": { + "type": "integer", + "description": "Number of results per page (max 100). Default is 30." + } + } + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_CONTACT", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_update_contact", + description="Update an existing contact.", + inputSchema={ + "type": "object", + "properties": { + "contact_id": { + "type": "integer", + "description": "ID of the contact to update" + }, + "name": { + "type": "string", + "description": "New name" + }, + "email": { + "type": "string", + "format": "email", + "description": "New primary email" + }, + "phone": { + "type": "string", + "description": "New phone number" + }, + "mobile": { + "type": "string", + "description": "New mobile number" + }, + "company_id": { + "type": "integer", + "description": "New company ID" + }, + "description": { + "type": "string", + "description": "New description" + }, + "job_title": { + "type": "string", + "description": "New job title" + }, + "tags": { + "type": "array", + "items": {"type": "string"}, + "description": "Updated list of tags" + }, + "custom_fields": { + "type": "object", + "description": "Updated custom fields" + }, + "avatar_path": { + "type": "string", + "description": "Path to new avatar image file" + }, + "address": { + "type": "string", + "description": "Address of the contact" + } + }, + "required": ["contact_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_CONTACT"}) + ), + types.Tool( + name="freshdesk_delete_contact", + description="Delete a contact. Set hard_delete=True to permanently delete.", + inputSchema={ + "type": "object", + "properties": { + "contact_id": { + "type": "integer", + "description": "ID of the contact to delete" + }, + "hard_delete": { + "type": "boolean", + "default": False, + "description": "If true, permanently delete the contact" + }, + "force": { + "type": "boolean", + "default": False, + "description": "If true, force hard delete even if not soft deleted first" + } + }, + "required": ["contact_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_CONTACT"}) + ), + types.Tool( + name="freshdesk_search_contacts_by_name", + description="Search for contacts by name", + inputSchema={ + "type": "object", + "properties": { + "name": {"type": "string", "description": "Name of the contact to search for"} + }, + "required": ["name"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_CONTACT", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_filter_contacts", + description="Filter contacts by fields", + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Filter query using this format - contact_field:integer OR contact_field:'string' AND contact_field:boolean e.g. {query: 'field_name:field_value'} - name:John Doe" + }, + "page": { + "type": "integer", + "description": "Page number (1-based)", + "default": 1 + }, + "updated_since": { + "type": "string", + "description": "Filter by last updated date (ISO 8601 format)" + } + }, + "required": ["query"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_CONTACT", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_make_contact_agent", + description="Make a contact an agent.", + inputSchema={ + "type": "object", + "properties": { + "contact_id": {"type": "integer", "description": "ID of the contact to make an agent"}, + "occasional": {"type": "boolean", "description": "Whether agent is occasional"}, + "signature": {"type": "string", "description": "HTML signature for the agent"}, + "ticket_scope": {"type": "integer", "description": "Ticket scope for the agent"}, + "skill_ids": {"type": "array", "items": {"type": "integer"}, "description": "List of skill IDs"}, + "group_ids": {"type": "array", "items": {"type": "integer"}, "description": "List of group IDs"}, + "role_ids": {"type": "array", "items": {"type": "integer"}, "description": "List of role IDs"}, + "agent_type": {"type": "string", "description": "Agent type (support_agent or business_agent)"}, + "focus_mode": {"type": "boolean", "description": "Whether agent is in focus mode"} + }, + "required": ["contact_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_CONTACT"}) + ), + types.Tool( + name="freshdesk_restore_contact", + description="Restore a deleted contact.", + inputSchema={ + "type": "object", + "properties": { + "contact_id": {"type": "integer", "description": "ID of the contact to restore"} + }, + "required": ["contact_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_CONTACT"}) + ), + types.Tool( + name="freshdesk_send_contact_invite", + description="Send an invite to a contact.", + inputSchema={ + "type": "object", + "properties": { + "contact_id": {"type": "integer", "description": "ID of the contact to send an invite to"} + }, + "required": ["contact_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_CONTACT"}) + ), + types.Tool( + name="freshdesk_merge_contacts", + description="Merge multiple contacts into a primary contact.", + inputSchema={ + "type": "object", + "properties": { + "primary_contact_id": { + "type": "integer", + "description": "ID of the primary contact to merge into" + }, + "secondary_contact_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "List of contact IDs to merge into the primary contact" + }, + "contact_data": { + "type": "object", + "description": "Optional dictionary of fields to update on the primary contact", + "properties": { + "email": { + "type": "string", + "description": "Primary email address of the contact." + }, + "phone": { + "type": "string", + "description": "Phone number of the contact." + }, + "mobile": { + "type": "string", + "description": "Mobile number of the contact." + }, + "company_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "IDs of the companies associated with the contact" + }, + "other_emails": { + "type": "array", + "items": {"type": "string"}, + "description": "Additional emails associated with the contact" + }, + } + } + }, + "required": ["primary_contact_id", "secondary_contact_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_CONTACT"}) + ), + types.Tool( + name="freshdesk_create_company", + description="Create a new company in Freshdesk.", + inputSchema={ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name of the company (required, unique)" + }, + "domains": { + "type": "array", + "items": {"type": "string"}, + "description": "List of company domains" + }, + "description": { + "type": "string", + "description": "Description of the company" + }, + "note": { + "type": "string", + "description": "Any specific note about the company" + }, + "health_score": { + "type": "string", + "description": "Health score of the company" + }, + "account_tier": { + "type": "string", + "description": "Account tier of the company" + }, + "renewal_date": { + "type": "string", + "description": "Contract renewal date (YYYY-MM-DD)" + }, + "industry": { + "type": "string", + "description": "Industry the company serves in" + }, + "custom_fields": { + "type": "object", + "description": "Dictionary of custom field values" + }, + "lookup_parameter": { + "type": "string", + "enum": ["display_id", "primary_field_value"], + "default": "display_id", + "description": "Lookup parameter type for custom objects" + } + }, + "required": ["name"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_COMPANY"}) + ), + types.Tool( + name="freshdesk_get_company_by_id", + description="Retrieve a company by ID.", + inputSchema={ + "type": "object", + "properties": { + "company_id": { + "type": "integer", + "description": "ID of the company to retrieve (required)." + } + }, + "required": ["company_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_COMPANY", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_list_companies", + description="List all companies with optional filtering.", + inputSchema={ + "type": "object", + "properties": { + "updated_since": { + "type": "string", + "format": "date-time", + "description": "Filter companies updated since this date (ISO 8601 format)" + }, + "page": { + "type": "integer", + "minimum": 1, + "default": 1, + "description": "Page number (1-based)" + }, + "per_page": { + "type": "integer", + "minimum": 1, + "maximum": 100, + "default": 30, + "description": "Number of records per page (max 100)" + } + } + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_COMPANY", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_update_company", + description="Update an existing company.", + inputSchema={ + "type": "object", + "properties": { + "company_id": { + "type": "integer", + "description": "ID of the company to update (required)." + }, + "name": { + "type": "string", + "description": "New name for the company" + }, + "domains": { + "type": "array", + "items": {"type": "string"}, + "description": "List of domains (will replace existing domains if provided)" + }, + "description": { + "type": "string", + "description": "New description" + }, + "note": { + "type": "string", + "description": "New note" + }, + "health_score": { + "type": "string", + "description": "Updated health score" + }, + "account_tier": { + "type": "string", + "description": "Updated account tier" + }, + "renewal_date": { + "type": "string", + "description": "New renewal date (YYYY-MM-DD)" + }, + "industry": { + "type": "string", + "description": "Updated industry" + }, + "custom_fields": { + "type": "object", + "description": "Dictionary of custom field values to update" + }, + "lookup_parameter": { + "type": "string", + "enum": ["display_id", "primary_field_value"], + "description": "Lookup parameter type for custom objects" + } + }, + "required": ["company_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_COMPANY"}) + ), + types.Tool( + name="freshdesk_delete_company", + description="Delete a company from Freshdesk.", + inputSchema={ + "type": "object", + "properties": { + "company_id": { + "type": "integer", + "description": "ID of the company to delete (required)." + } + }, + "required": ["company_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_COMPANY"}) + ), + types.Tool( + name="freshdesk_filter_companies", + description="Filter companies using a query string. ", + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Query string to filter companies (domain, created_at, updated_at, custom fields) - company_field:integer OR company_field:'string' AND company_field:boolean e.g. {query: 'field_name:field_value'} - domain:Example" + }, + "page": { + "type": "integer", + "minimum": 1, + "default": 1, + "description": "Page number (1-based)" + }, + "per_page": { + "type": "integer", + "minimum": 1, + "maximum": 30, + "default": 30, + "description": "Number of records per page (max 30)" + } + }, + "required": ["query"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_COMPANY"}) + ), + types.Tool( + name="freshdesk_search_companies_by_name", + description="Search for companies by name (autocomplete).", + inputSchema={ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Search name (case-insensitive) (required)." + } + }, + "required": ["name"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_COMPANY", "readOnlyHint": True}) + ), + + types.Tool( + name="freshdesk_get_current_account", + description="Retrieve the current account.", + inputSchema={ + "type": "object", + "properties": {}, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_ACCOUNT", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_list_agents", + description="List all agents with optional filtering", + inputSchema={ + "type": "object", + "properties": { + "email": { + "type": "string", + "description": "Filter by email address" + }, + "mobile": { + "type": "string", + "description": "Filter by mobile number" + }, + "phone": { + "type": "string", + "description": "Filter by phone number" + }, + "state": { + "type": "string", + "enum": ["fulltime", "occasional"], + "description": "Filter by agent state" + }, + "page": { + "type": "integer", + "description": "Page number (1-based)", + "default": 1 + }, + "per_page": { + "type": "integer", + "description": "Number of results per page (max 100)", + "default": 30 + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_AGENT", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_get_agent_by_id", + description="Get details of a specific agent by ID", + inputSchema={ + "type": "object", + "properties": { + "agent_id": { + "type": "integer", + "description": "ID of the agent to retrieve" + }, + }, + "required": ["agent_id"], + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_AGENT", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_get_current_agent", + description="Get details of the currently authenticated agent", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_AGENT", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_create_agent", + description="Create a new agent", + inputSchema={ + "type": "object", + "properties": { + "email": { + "type": "string", + "description": "Email address of the agent" + }, + "name": { + "type": "string", + "description": "Name of the agent" + }, + "ticket_scope": { + "type": "integer", + "enum": [1, 2, 3], + "description": "Ticket permission (1=Global, 2=Group, 3=Restricted)" + }, + "role_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "List of role IDs for the agent" + }, + "group_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "List of group IDs the agent belongs to" + }, + "skill_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "List of skill IDs for the agent" + }, + "occasional": { + "type": "boolean", + "description": "Whether the agent is occasional (True) or full-time (False)", + "default": False + }, + "signature": { + "type": "string", + "description": "HTML signature for the agent" + }, + "language": { + "type": "string", + "description": "Language code (default: 'en')", + "default": "en" + }, + "time_zone": { + "type": "string", + "description": "Time zone for the agent" + }, + "agent_type": { + "type": "integer", + "enum": [1, 2, 3], + "description": "Type of agent (1=Support, 2=Field, 3=Collaborator)", + "default": 1 + }, + "focus_mode": { + "type": "boolean", + "description": "Whether focus mode is enabled", + "default": True + }, + }, + "required": ["email", "name", "ticket_scope", "role_ids"], + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_AGENT"}) + ), + types.Tool( + name="freshdesk_update_agent", + description="Update an existing agent", + inputSchema={ + "type": "object", + "properties": { + "agent_id": { + "type": "integer", + "description": "ID of the agent to update" + }, + "email": { + "type": "string", + "description": "New email address" + }, + "ticket_scope": { + "type": "integer", + "enum": [1, 2, 3], + "description": "New ticket permission (1=Global, 2=Group, 3=Restricted)" + }, + "role_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "New list of role IDs" + }, + "group_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "New list of group IDs" + }, + "skill_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "New list of skill IDs" + }, + "occasional": { + "type": "boolean", + "description": "Whether the agent is occasional" + }, + "signature": { + "type": "string", + "description": "New HTML signature" + }, + "language": { + "type": "string", + "description": "New language code" + }, + "time_zone": { + "type": "string", + "description": "New time zone" + }, + "focus_mode": { + "type": "boolean", + "description": "Whether focus mode is enabled" + }, + }, + "required": ["agent_id"], + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_AGENT"}) + ), + types.Tool( + name="freshdesk_delete_agent", + description="Delete an agent (downgrades to contact)", + inputSchema={ + "type": "object", + "properties": { + "agent_id": {"type": "integer", "description": "ID of the agent to delete"}, + }, + "required": ["agent_id"], + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_AGENT"}) + ), + types.Tool( + name="freshdesk_search_agents", + description="Search for agents by name or email", + inputSchema={ + "type": "object", + "properties": { + "term": {"type": "string", "description": "Search term (name or email)"}, + }, + "required": ["term"], + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_AGENT", "readOnlyHint": True}) + ), + types.Tool( + name="freshdesk_bulk_create_agents", + description="Create multiple agents in bulk", + inputSchema={ + "type": "object", + "properties": { + "agents_data": { + "type": "array", + "items": { + "type": "object", + "properties": { + "email": { + "type": "string", + "description": "Email address of the agent" + }, + "name": { + "type": "string", + "description": "Name of the agent" + }, + "ticket_scope": { + "type": "integer", + "enum": [1, 2, 3], + "description": "Ticket permission (1=Global, 2=Group, 3=Restricted)" + }, + "role_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "List of role IDs for the agent" + }, + "group_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "List of group IDs the agent belongs to" + }, + "skill_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "List of skill IDs for the agent" + }, + "occasional": { + "type": "boolean", + "description": "Whether the agent is occasional (True) or full-time (False)" + }, + "signature": { + "type": "string", + "description": "HTML signature for the agent" + }, + "language": { + "type": "string", + "description": "Language code (default: 'en')" + }, + "time_zone": { + "type": "string", + "description": "Time zone for the agent" + }, + "agent_type": { + "type": "integer", + "description": "Type of agent (1=Support, 2=Field, 3=Collaborator)" + }, + "focus_mode": { + "type": "boolean", + "description": "Whether focus mode is enabled (default: True)" + }, + }, + "required": ["email", "name", "ticket_scope", "role_ids"], + }, + "description": "List of agent data objects", + }, + }, + "required": ["agents_data"], + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_AGENT"}) + ), + + # Thread tools + types.Tool( + name="freshdesk_create_thread", + description="Create a new thread in Freshdesk.", + inputSchema={ + "type": "object", + "properties": { + "thread_type": { + "type": "string", + "enum": ["forward", "discussion", "private"], + "description": "Type of thread (forward, discussion, private)" + }, + "parent_id": { + "type": "integer", + "description": "ID of the parent object (usually ticket)" + }, + "parent_type": { + "type": "string", + "default": "ticket", + "description": "Type of parent object (default: ticket)" + }, + "title": { + "type": "string", + "description": "Title of the thread" + }, + "created_by": { + "type": "string", + "description": "ID of the user creating the thread" + }, + "anchor_id": { + "type": "integer", + "description": "ID of the anchor object (e.g., conversation ID)" + }, + "anchor_type": { + "type": "string", + "description": "Type of anchor object (e.g., conversation)" + }, + "participants_emails": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "List of email addresses of participants" + }, + "participants_agents": { + "type": "array", + "items": {"type": "string"}, + "description": "List of agent IDs of participants" + }, + "additional_info": { + "type": "object", + "description": "Additional information like email_config_id" + } + }, + "required": ["thread_type", "parent_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_THREAD"}) + ), + + types.Tool( + name="freshdesk_get_thread_by_id", + description="Get a thread by its ID.", + inputSchema={ + "type": "object", + "properties": { + "thread_id": { + "type": "integer", + "description": "ID of the thread to retrieve" + } + }, + "required": ["thread_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_THREAD"}) + ), + + types.Tool( + name="freshdesk_update_thread", + description="Update a thread in Freshdesk.", + inputSchema={ + "type": "object", + "properties": { + "thread_id": { + "type": "integer", + "description": "ID of the thread to update" + }, + "title": { + "type": "string", + "description": "New title for the thread" + }, + "description": { + "type": "string", + "description": "New description for the thread" + } + }, + "required": ["thread_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_THREAD"}) + ), + + types.Tool( + name="freshdesk_delete_thread", + description="Delete a thread from Freshdesk. Note: This is an irreversible action!", + inputSchema={ + "type": "object", + "properties": { + "thread_id": { + "type": "integer", + "description": "ID of the thread to delete" + } + }, + "required": ["thread_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_THREAD", "readOnlyHint": True}) + ), + + types.Tool( + name="freshdesk_create_thread_message", + description="Create a new message for a thread.", + inputSchema={ + "type": "object", + "properties": { + "thread_id": { + "type": "integer", + "description": "ID of the thread to add message to" + }, + "body": { + "type": "string", + "description": "HTML content of the message" + }, + "body_text": { + "type": "string", + "description": "Plain text content of the message" + }, + "attachment_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "List of attachment IDs to include" + }, + "inline_attachment_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "List of inline attachment IDs" + }, + "participants_email_to": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "List of email addresses to send to" + }, + "participants_email_cc": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "List of email addresses to CC" + }, + "participants_email_bcc": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "List of email addresses to BCC" + }, + "participants_email_from": { + "type": "string", + "format": "email", + "description": "Email address to send from" + }, + "additional_info": { + "type": "object", + "description": "Additional information like has_quoted_text, email_subject" + }, + "full_message": { + "type": "string", + "description": "HTML content with original and quoted text" + }, + "full_message_text": { + "type": "string", + "description": "Plain text with quoted text" + } + }, + "required": ["thread_id", "body"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_THREAD"}) + ), + + types.Tool( + name="freshdesk_get_thread_message_by_id", + description="Get a thread message by its ID.", + inputSchema={ + "type": "object", + "properties": { + "message_id": { + "type": "integer", + "description": "ID of the message to retrieve" + } + }, + "required": ["message_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_THREAD"}) + ), + + types.Tool( + name="freshdesk_update_thread_message", + description="Update a thread message.", + inputSchema={ + "type": "object", + "properties": { + "message_id": { + "type": "integer", + "description": "ID of the message to update" + }, + "body": { + "type": "string", + "description": "New HTML content of the message" + }, + "body_text": { + "type": "string", + "description": "New plain text content of the message" + }, + "attachment_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "New list of attachment IDs" + }, + "inline_attachment_ids": { + "type": "array", + "items": {"type": "integer"}, + "description": "New list of inline attachment IDs" + }, + "additional_info": { + "type": "object", + "description": "New additional information" + } + }, + "required": ["message_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_THREAD"}) + ), + + types.Tool( + name="freshdesk_delete_thread_message", + description="Delete a thread message. Note: This is an irreversible action!", + inputSchema={ + "type": "object", + "properties": { + "message_id": { + "type": "integer", + "description": "ID of the message to delete" + } + }, + "required": ["message_id"] + }, + annotations=types.ToolAnnotations(**{"category": "FRESHDESK_THREAD"}) + ), + ] + + @app.call_tool() + async def call_tool(name: str, arguments: dict) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + try: + + if name == "freshdesk_create_ticket": + result = await create_ticket(**arguments) + elif name == "freshdesk_get_ticket_by_id": + result = await get_ticket_by_id(**arguments) + elif name == "freshdesk_list_tickets": + result = await list_tickets(**arguments) + elif name == "freshdesk_filter_tickets": + result = await filter_tickets(**arguments) + elif name == "freshdesk_add_note_to_ticket": + result = await add_note_to_ticket(**arguments) + elif name == "freshdesk_reply_to_a_ticket": + result = await reply_to_a_ticket(**arguments) + elif name == "freshdesk_update_note": + result = await update_note(**arguments) + elif name == "freshdesk_delete_note": + result = await delete_note(**arguments) + elif name == "freshdesk_merge_tickets": + result = await merge_tickets(**arguments) + elif name == "freshdesk_restore_ticket": + result = await restore_ticket(**arguments) + elif name == "freshdesk_watch_ticket": + result = await watch_ticket(**arguments) + elif name == "freshdesk_unwatch_ticket": + result = await unwatch_ticket(**arguments) + elif name == "freshdesk_forward_ticket": + result = await forward_ticket(**arguments) + elif name == "freshdesk_update_ticket": + result = await update_ticket(**arguments) + elif name == "freshdesk_create_ticket_with_attachments": + result = await create_ticket_with_attachments(**arguments) + elif name == "freshdesk_get_archived_ticket": + result = await get_archived_ticket(**arguments) + elif name == "freshdesk_delete_archived_ticket": + result = await delete_archived_ticket(**arguments) + elif name == "freshdesk_delete_ticket": + result = await delete_ticket(**arguments) + elif name == "freshdesk_delete_multiple_tickets": + result = await delete_multiple_tickets(**arguments) + elif name == "freshdesk_delete_attachment": + result = await delete_attachment(**arguments) + + elif name == "freshdesk_create_contact": + result = await create_contact(**arguments) + elif name == "freshdesk_get_contact_by_id": + result = await get_contact_by_id(**arguments) + elif name == "freshdesk_list_contacts": + result = await list_contacts(**arguments) + elif name == "freshdesk_update_contact": + result = await update_contact(**arguments) + elif name == "freshdesk_delete_contact": + result = await delete_contact(**arguments) + elif name == "freshdesk_search_contacts_by_name": + result = await search_contacts_by_name(**arguments) + elif name == "freshdesk_merge_contacts": + result = await merge_contacts(**arguments) + elif name == "freshdesk_filter_contacts": + result = await filter_contacts(**arguments) + elif name == "freshdesk_make_contact_agent": + result = await make_contact_agent(**arguments) + elif name == "freshdesk_restore_contact": + result = await restore_contact(**arguments) + elif name == "freshdesk_send_contact_invite": + result = await send_contact_invite(**arguments) + + elif name == "freshdesk_create_company": + result = await create_company(**arguments) + elif name == "freshdesk_get_company_by_id": + result = await get_company_by_id(**arguments) + elif name == "freshdesk_list_companies": + result = await list_companies(**arguments) + elif name == "freshdesk_update_company": + result = await update_company(**arguments) + elif name == "freshdesk_delete_company": + result = await delete_company(**arguments) + elif name == "freshdesk_filter_companies": + result = await filter_companies(**arguments) + elif name == "freshdesk_search_companies_by_name": + result = await search_companies_by_name(**arguments) + + elif name == "freshdesk_get_current_account": + result = await get_current_account() + + elif name == "freshdesk_get_current_agent": + result = await get_current_agent() + elif name == "freshdesk_get_agent_by_id": + result = await get_agent_by_id(**arguments) + elif name == "freshdesk_list_agents": + result = await list_agents(**arguments) + elif name == "freshdesk_create_agent": + result = await create_agent(**arguments) + elif name == "freshdesk_update_agent": + result = await update_agent(**arguments) + elif name == "freshdesk_delete_agent": + result = await delete_agent(**arguments) + elif name == "freshdesk_search_agents": + result = await search_agents(**arguments) + elif name == "freshdesk_bulk_create_agents": + result = await bulk_create_agents(**arguments) + + # Thread tools + elif name == "freshdesk_create_thread": + result = await create_thread(**arguments) + elif name == "freshdesk_get_thread_by_id": + result = await get_thread_by_id(**arguments) + elif name == "freshdesk_update_thread": + result = await update_thread(**arguments) + elif name == "freshdesk_delete_thread": + result = await delete_thread(**arguments) + elif name == "freshdesk_create_thread_message": + result = await create_thread_message(**arguments) + elif name == "freshdesk_get_thread_message_by_id": + result = await get_thread_message_by_id(**arguments) + elif name == "freshdesk_update_thread_message": + result = await update_thread_message(**arguments) + elif name == "freshdesk_delete_thread_message": + result = await delete_thread_message(**arguments) + else: + raise ValueError(f"Unknown tool: {name}") + + if isinstance(result, dict) and result.get("error") : + logger.error(f"Error executing tool {name}: {result.get('error')}") + return [types.TextContent(type="text", text=f"{result.get('error')}")] + + logger.info(f"Tool {name} executed successfully with arguments: {arguments}") + + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + except ValueError as e: + logger.exception(f"Error executing tool {name}: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract credentials (API key, domain, auth token) from headers + credentials = extract_credentials(request) + + # Set the API key, auth token and domain in context for this request + auth_token = auth_token_context.set(credentials['api_key']) + domain_token = domain_context.set(credentials['domain']) + try: + async with sse.connect_sse(request.scope, request.receive, request._send) as streams: + await app.run(streams[0], streams[1], app.create_initialization_options()) + finally: + auth_token_context.reset(auth_token) + domain_context.reset(domain_token) + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http(scope: Scope, receive: Receive, send: Send) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract credentials (API key, domain, auth token) from headers + credentials = extract_credentials(scope) + + # Set the API key, auth token and domain in context for this request + auth_token = auth_token_context.set(credentials['api_key']) + domain_token = domain_context.set(credentials['domain']) + + try: + await session_manager.handle_request(scope, receive, send) + except Exception as e: + logger.exception(f"Error handling StreamableHTTP request: {e}") + finally: + auth_token_context.reset(auth_token) + domain_context.reset(domain_token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/freshdesk/tools/__init__.py b/mcp_servers/freshdesk/tools/__init__.py new file mode 100644 index 00000000..ce5efc98 --- /dev/null +++ b/mcp_servers/freshdesk/tools/__init__.py @@ -0,0 +1,153 @@ +# Freshdesk MCP Server Tools +# This package contains all the tool implementations organized by object type + + +from .base import auth_token_context, domain_context +from .tickets import ( + create_ticket, + get_ticket_by_id, + update_ticket, + delete_ticket, + delete_multiple_tickets, + list_tickets, + add_note_to_ticket, + filter_tickets, + merge_tickets, + restore_ticket, + watch_ticket, + unwatch_ticket, + delete_attachment, + create_ticket_with_attachments, + forward_ticket, + get_archived_ticket, + delete_archived_ticket, + reply_to_a_ticket, + update_note, + delete_note, +) + +from .contacts import ( + create_contact, + get_contact_by_id, + list_contacts, + update_contact, + delete_contact, + make_contact_agent, + restore_contact, + send_contact_invite, + merge_contacts, + filter_contacts, + search_contacts_by_name, +) + +from .companies import ( + create_company, + get_company_by_id, + list_companies, + update_company, + delete_company, + filter_companies, + search_companies_by_name, +) + +from .accounts import ( + get_current_account, +) + +from .agents import ( + list_agents, + get_agent_by_id, + get_current_agent, + create_agent, + update_agent, + delete_agent, + search_agents, + bulk_create_agents, +) + +from .thread import ( + create_thread, + get_thread_by_id, + update_thread, + delete_thread, + create_thread_message, + get_thread_message_by_id, + update_thread_message, + delete_thread_message +) + +__all__ = [ + + # Context variables + 'auth_token_context', + 'domain_context', + + # Tickets + 'create_ticket', + 'get_ticket_by_id', + 'update_ticket', + 'delete_ticket', + 'delete_multiple_tickets', + 'list_tickets', + 'add_note_to_ticket', + 'filter_tickets', + 'merge_tickets', + 'restore_ticket', + 'watch_ticket', + 'unwatch_ticket', + 'forward_ticket', + 'get_archived_ticket', + 'delete_archived_ticket', + 'reply_to_a_ticket', + 'update_note', + 'delete_note', + + # Attachments + 'delete_attachment', + 'create_ticket_with_attachments', + + # Contacts + 'create_contact', + 'get_contact_by_id', + 'list_contacts', + 'update_contact', + 'delete_contact', + 'make_contact_agent', + 'restore_contact', + 'send_contact_invite', + 'merge_contacts', + 'filter_contacts', + 'search_contacts_by_name', + + # Companies + 'create_company', + 'get_company_by_id', + 'list_companies', + 'update_company', + 'delete_company', + 'filter_companies', + 'search_companies_by_name', + + # Accounts + 'get_current_account', + + # Agent exports + 'list_agents', + 'get_agent_by_id', + 'get_current_agent', + 'create_agent', + 'update_agent', + 'delete_agent', + 'search_agents', + 'bulk_create_agents', + + # Threads + 'create_thread', + 'get_thread_by_id', + 'update_thread', + 'delete_thread', + 'create_thread_message', + 'get_thread_message_by_id', + 'update_thread_message', + 'delete_thread_message' +] \ No newline at end of file diff --git a/mcp_servers/freshdesk/tools/accounts.py b/mcp_servers/freshdesk/tools/accounts.py new file mode 100644 index 00000000..352b2591 --- /dev/null +++ b/mcp_servers/freshdesk/tools/accounts.py @@ -0,0 +1,19 @@ +from typing import Dict, List, Optional, Any +from .base import make_freshdesk_request, handle_freshdesk_error, remove_none_values +import logging + +logger = logging.getLogger(__name__) + + +async def get_current_account() -> Dict[str, Any]: + """ + Retrieve the current account. + + Returns: + Dict containing the account data or error information + """ + try: + return await make_freshdesk_request("GET", "/account") + except Exception as e: + return handle_freshdesk_error(e, "retrieve", "current account") + \ No newline at end of file diff --git a/mcp_servers/freshdesk/tools/agents.py b/mcp_servers/freshdesk/tools/agents.py new file mode 100644 index 00000000..be207d2e --- /dev/null +++ b/mcp_servers/freshdesk/tools/agents.py @@ -0,0 +1,255 @@ +import logging +from typing import Dict, List, Optional, Any +from .base import make_freshdesk_request, handle_freshdesk_error, remove_none_values + +# Configure logging +logger = logging.getLogger(__name__) + +async def list_agents( + email: Optional[str] = None, + mobile: Optional[str] = None, + phone: Optional[str] = None, + state: Optional[str] = None, + page: int = 1, + per_page: int = 30, +) -> Dict[str, Any]: + """ + List all agents with optional filtering. + + Args: + email: Filter by email address + mobile: Filter by mobile number + phone: Filter by phone number + state: Filter by state ('fulltime' or 'occasional') + page: Page number (1-based) + per_page: Number of results per page (max 100) + + Returns: + Dictionary containing agents and pagination info + """ + try: + params = { + 'email': email, + 'mobile': mobile, + 'phone': phone, + 'state': state, + 'page': page, + 'per_page': min(100, max(1, per_page)), + } + params = remove_none_values(params) + + response = await make_freshdesk_request( + 'GET', + '/agents', + options={"query_params": params} + ) + + return response + + except Exception as e: + return handle_freshdesk_error(e, 'list', 'agents') + + +async def get_agent_by_id(agent_id: int) -> Dict[str, Any]: + """ + Get details of a specific agent by ID. + + Args: + agent_id: ID of the agent to retrieve + + Returns: + Dictionary containing agent details + """ + try: + return await make_freshdesk_request('GET', f'/agents/{agent_id}') + except Exception as e: + return handle_freshdesk_error(e, 'get', 'agent', agent_id) + + +async def get_current_agent() -> Dict[str, Any]: + """ + Get details of the currently authenticated agent. + + Returns: + Dictionary containing current agent details + """ + try: + return await make_freshdesk_request('GET', '/agents/me') + except Exception as e: + return handle_freshdesk_error(e, 'get', 'current agent') + + +async def create_agent( + email: str, + name: str, + ticket_scope: int, + role_ids: List[int], + group_ids: Optional[List[int]] = None, + skill_ids: Optional[List[int]] = None, + occasional: bool = False, + signature: Optional[str] = None, + language: str = 'en', + time_zone: Optional[str] = None, + agent_type: int = 1, + focus_mode: bool = True, + **kwargs +) -> Dict[str, Any]: + """ + Create a new agent. + + Args: + email: Email address of the agent + name: Name of the agent + ticket_scope: Ticket permission (1=Global, 2=Group, 3=Restricted) + role_ids: List of role IDs for the agent + group_ids: List of group IDs the agent belongs to + skill_ids: List of skill IDs for the agent + occasional: Whether the agent is occasional (True) or full-time (False) + signature: HTML signature for the agent + language: Language code (default: 'en') + time_zone: Time zone for the agent + agent_type: Type of agent (1=Support, 2=Field, 3=Collaborator) + focus_mode: Whether focus mode is enabled (default: True) + + Returns: + Dictionary containing the created agent details + """ + try: + data = { + 'email': email, + 'name': name, + 'ticket_scope': ticket_scope, + 'role_ids': role_ids, + 'group_ids': group_ids, + 'skill_ids': skill_ids, + 'occasional': occasional, + 'signature': signature, + 'language': language, + 'time_zone': time_zone, + 'agent_type': agent_type, + 'focus_mode': focus_mode, + **kwargs + } + + data = remove_none_values(data) + + return await make_freshdesk_request('POST', '/agents', data=data) + + except Exception as e: + return handle_freshdesk_error(e, 'create', 'agent') + + +async def update_agent( + agent_id: int, + email: Optional[str] = None, + ticket_scope: Optional[int] = None, + role_ids: Optional[List[int]] = None, + group_ids: Optional[List[int]] = None, + skill_ids: Optional[List[int]] = None, + occasional: Optional[bool] = None, + signature: Optional[str] = None, + language: Optional[str] = None, + time_zone: Optional[str] = None, + focus_mode: Optional[bool] = None, + **kwargs +) -> Dict[str, Any]: + """ + Update an existing agent. + + Args: + agent_id: ID of the agent to update + email: New email address + ticket_scope: New ticket permission (1=Global, 2=Group, 3=Restricted) + role_ids: New list of role IDs + group_ids: New list of group IDs + skill_ids: New list of skill IDs + occasional: Whether the agent is occasional + signature: New HTML signature + language: New language code + time_zone: New time zone + focus_mode: Whether focus mode is enabled + + Returns: + Dictionary containing the updated agent details + """ + try: + data = { + 'email': email, + 'ticket_scope': ticket_scope, + 'role_ids': role_ids, + 'group_ids': group_ids, + 'skill_ids': skill_ids, + 'occasional': occasional, + 'signature': signature, + 'language': language, + 'time_zone': time_zone, + 'focus_mode': focus_mode, + **kwargs + } + + data = remove_none_values(data) + + if not data: + raise ValueError("No fields to update") + + return await make_freshdesk_request('PUT', f'/agents/{agent_id}', data=data) + + except Exception as e: + return handle_freshdesk_error(e, 'update', 'agent', agent_id) + + +async def delete_agent(agent_id: int) -> Dict[str, Any]: + """ + Delete an agent (downgrades to contact). + + Args: + agent_id: ID of the agent to delete + + Returns: + Empty dictionary on success + """ + try: + await make_freshdesk_request('DELETE', f'/agents/{agent_id}') + return {'success': True, 'message': f'Agent {agent_id} deleted successfully'} + except Exception as e: + return handle_freshdesk_error(e, 'delete', 'agent', agent_id) + + +async def search_agents(term: str) -> List[Dict[str, Any]]: + """ + Search for agents by name or email. + + Args: + term: Search term (name or email) + + Returns: + List of matching agents + """ + try: + return await make_freshdesk_request( + 'GET', + '/agents/autocomplete', + options={"query_params": {"term": term}} + ) + except Exception as e: + return handle_freshdesk_error(e, 'search', 'agents') + + +async def bulk_create_agents(agents_data: List[Dict[str, Any]]) -> Dict[str, Any]: + """ + Create multiple agents in bulk. + + Args: + agents_data: List of agent data dictionaries + + Returns: + Dictionary with job ID and status URL + """ + try: + return await make_freshdesk_request( + 'POST', + '/agents/bulk', + data={'agents': agents_data} + ) + except Exception as e: + return handle_freshdesk_error(e, 'bulk create', 'agents') \ No newline at end of file diff --git a/mcp_servers/freshdesk/tools/base.py b/mcp_servers/freshdesk/tools/base.py new file mode 100644 index 00000000..85eb0a40 --- /dev/null +++ b/mcp_servers/freshdesk/tools/base.py @@ -0,0 +1,351 @@ +import requests +import os +import random +import string +from typing import Any, Dict, Optional, List +import base64 +from dotenv import load_dotenv +from util import rate_limiter +from contextvars import ContextVar + + +load_dotenv() + +auth_token_context: ContextVar[str] = ContextVar('auth_token') +domain_context: ContextVar[str] = ContextVar('domain') + +def get_config_value( + context_var: ContextVar[str], + env_name: str, +) -> str: + try: + value = context_var.get() + if value: + return value + except LookupError: + pass + + value = os.getenv(env_name) + if value: + return value + + raise RuntimeError( + f"No {env_name} found. Please set either the {env_name} environment variable " + f"or provide it in the context." + ) + + +def get_auth_token() -> str: + return get_config_value( + context_var=auth_token_context, + env_name="FRESHDESK_API_KEY" + ) + + +def get_domain() -> str: + return get_config_value( + context_var=domain_context, + env_name="FRESHDESK_DOMAIN" + ) + + +def gen_random_password(length: int = 10) -> str: + """Generate a random string of specified length.""" + return ''.join(random.choices(string.ascii_letters + string.digits, k=length)) + + +async def make_freshdesk_request( + method: str, + endpoint: str, + data: Optional[Dict] = None, + options: Optional[Dict] = {}, +) -> Any: + """Make an HTTP request to the Freshdesk API. + + Args: + method: HTTP method (GET, POST, PUT, DELETE, etc.) + endpoint: API endpoint (e.g., '/tickets') + data: Optional request payload as a dictionary + options: Optional request options as a dictionary + timeout: Request timeout in seconds (default: 10) + use_pwd: Use basic authentication with a random password (default: True) + query_params: Optional query parameters as a dictionary + headers: Optional headers as a dictionary + files: Optional files as a dictionary + + Returns: + Parsed JSON response from the API + + Raises: + ValueError: For invalid input parameters + requests.exceptions.RequestException: For HTTP and connection errors + json.JSONDecodeError: If response cannot be parsed as JSON + """ + if not isinstance(method, str) or not method.strip(): + raise ValueError("HTTP method must be a non-empty string") + + if not isinstance(endpoint, str) or not endpoint.strip(): + raise ValueError("Endpoint must be a non-empty string") + + if data is not None and not isinstance(data, dict): + raise ValueError("Data must be a dictionary or None") + + FRESHDESK_API_BASE = f"https://{get_domain()}.freshdesk.com/api/v2" + FRESHDESK_API_KEY = get_auth_token() + + + url = f"{FRESHDESK_API_BASE}{endpoint}" + + + timeout = int(options.get("timeout", 10)) + use_pwd = options.get("use_pwd", True) + query_params = options.get("query_params", {}) + extra_headers = options.get("headers", {}) + files = options.get("files", None) + + headers = { + "Content-Type": "application/json", + **extra_headers, + } + + if files: + del headers["Content-Type"] + + random_password = gen_random_password() + + request_args = {} + + if query_params: + request_args["params"] = query_params + + if files: + request_args["files"] = files + + if headers.get("Content-Type", None) == "application/json": + request_args["json"] = data + else: + request_args["data"] = data + + if not use_pwd: + api_key = base64.b64encode(f"{FRESHDESK_API_KEY}:{random_password}").decode("utf-8") + headers["Authorization"] = f"{api_key}" + else: + request_args["auth"] = (FRESHDESK_API_KEY, random_password) + + try: + + response = requests.request( + method=method.upper(), + url=url, + headers=headers, + timeout=timeout, + **request_args, + ) + + rate_limiter.update_from_headers(response.headers) + + if response.status_code == 429: + retry_after = int(response.headers.get('Retry-After', 30)) + await asyncio.sleep(retry_after) + return await make_freshdesk_request(method, endpoint, data, options) + + # Log response status for debugging + print(f"Freshdesk API {method.upper()} {endpoint} - Status: {response.status_code}") + + # Raise HTTPError for 4XX/5XX responses + response.raise_for_status() + + # Handle empty responses (e.g., 204 No Content) + if not response.content: + return {} + + return response.json() + + except requests.exceptions.Timeout as e: + raise requests.exceptions.Timeout(f"Request to Freshdesk API timed out after {timeout} seconds") from e + + except requests.exceptions.TooManyRedirects as e: + raise requests.exceptions.TooManyRedirects("Too many redirects while connecting to Freshdesk API") from e + + except ValueError as e: + raise ValueError(f"Failed to parse JSON response from Freshdesk API: {str(e)}") from e + + +def handle_freshdesk_error(e: Exception, operation: str, object_type: str = "") -> Dict[str, Any]: + """Handle Freshdesk errors and return a standardized error response. + + Args: + e: The exception that was raised + operation: The operation being performed (e.g., 'create', 'update', 'delete') + object_type: The type of object being operated on (e.g., 'ticket', 'contact') + + Returns: + A dictionary containing error details with the following structure: + { + 'success': False, + 'error': { + 'code': 'error_code', + 'message': 'Human-readable error message', + 'details': [ + {'field': 'field_name', 'message': 'Error message', 'code': 'error_code'} + ] + } + } + """ + error_response = { + 'success': False, + 'error': { + 'code': 'unknown_error', + 'message': f"An unexpected error occurred while {operation}ing {object_type}", + 'details': [] + } + } + + # Handle requests.exceptions.RequestException and its subclasses + if isinstance(e, requests.exceptions.RequestException): + error_response['error']['code'] = 'request_error' + + # Handle HTTP errors (4XX, 5XX) + if hasattr(e, 'response') and e.response is not None: + status_code = e.response.status_code + + # Map status codes to error codes + status_to_code = { + 400: 'invalid_request', + 401: 'authentication_failed', + 403: 'access_denied', + 404: 'not_found', + 405: 'method_not_allowed', + 406: 'unsupported_accept_header', + 409: 'conflict', + 415: 'unsupported_content_type', + 429: 'rate_limit_exceeded', + 500: 'server_error' + } + + error_code = status_to_code.get(status_code, f'http_error_{status_code}') + error_response['error']['code'] = error_code + + # Try to extract error details from the response + try: + if e.response.content: + error_data = e.response.json() + error_response['error']['message'] = error_data.get('description', + f"Freshdesk API returned status {status_code}") + + # Add field-level errors if available + if 'errors' in error_data and isinstance(error_data['errors'], list): + for err in error_data['errors']: + error_detail = { + 'field': err.get('field', ''), + 'message': err.get('message', 'Validation error'), + 'code': err.get('code', 'validation_error') + } + error_response['error']['details'].append(error_detail) + + # If no field errors but there's a message, use it + elif not error_response['error']['details'] and 'message' in error_data: + error_response['error']['details'].append({ + 'message': error_data['message'], + 'code': error_code + }) + else: + error_response['error']['message'] = f"{str(e)}" + except ValueError: + # If we can't parse JSON, use the response text + error_response['error']['message'] = f"Freshdesk API error: {e.response.text}" + else: + # Handle connection errors, timeouts, etc. + if isinstance(e, requests.exceptions.Timeout): + error_response['error'].update({ + 'code': 'request_timeout', + 'message': 'The request to Freshdesk API timed out' + }) + elif isinstance(e, requests.exceptions.ConnectionError): + error_response['error'].update({ + 'code': 'connection_error', + 'message': 'Could not connect to Freshdesk API' + }) + else: + error_response['error'].update({ + 'code': 'request_failed', + 'message': f"{str(e)}" + }) + + # Handle JSON decode errors + elif isinstance(e, ValueError) and 'JSON' in str(e): + error_response['error'].update({ + 'code': 'invalid_json', + 'message': 'Received invalid JSON response from Freshdesk API' + }) + + # Handle other unexpected errors + else: + error_response['error'].update({ + 'code': 'unexpected_error', + 'message': f"An unexpected error occurred: {str(e)}" + }) + + # Add operation and object type context to the error message + if object_type: + error_response['error']['message'] = f"Failed to {operation} {object_type} due to: {error_response['error']['message']}" + else: + error_response['error']['message'] = f"Failed to {operation} due to: {error_response['error']['message']}" + + return error_response + + +def handle_freshdesk_attachments(field_name: str, attachments: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """ + Handle attachments for Freshdesk API requests. + + Args: + field_name: Name of the field to which the attachment is being added + attachments: List of attachments to be added + + Returns: + List of resolved attachments ready to be sent in the API request + """ + resolved_attachments = [] + + for attachment in attachments: + if attachment["type"] == "local": + file_content = open(attachment["content"], "rb") + file_name = attachment.get("name", gen_random_password(15)) + + resolved_attachments.append(( + f"{field_name}", + (file_name, file_content, attachment.get("media_type", "application/octet-stream")) + )) + elif attachment["type"] == "file": + file_encoding = attachment.get("encoding", "utf-8") + file_content_str = attachment["content"] + file_content = file_content_str.encode(file_encoding) + file_name = attachment.get("name", gen_random_password(15)) + resolved_attachments.append(( + f"{field_name}", + (file_name, file_content, attachment.get("media_type", "application/octet-stream")) + )) + elif attachment["type"] == "base64": + file_content = base64.b64decode(attachment["content"]) + file_name = attachment.get("name", gen_random_password(15)) + resolved_attachments.append(( + f"{field_name}", + (file_name, file_content, attachment.get("media_type", "application/octet-stream")), + )) + elif attachment["type"] == "url": + file_content = requests.get(attachment["content"]).content + file_name = attachment.get("name", gen_random_password(15)) + resolved_attachments.append(( + f"{field_name}", + (file_name, file_content, attachment.get("media_type", "application/octet-stream")), + )) + else: + raise ValueError(f"Invalid attachment type: {attachment['type']}") + return resolved_attachments + + + +def remove_none_values(data: Dict[str, Any]) -> Dict[str, Any]: + return {k: v for k, v in data.items() if v is not None} + \ No newline at end of file diff --git a/mcp_servers/freshdesk/tools/companies.py b/mcp_servers/freshdesk/tools/companies.py new file mode 100644 index 00000000..cf1596a5 --- /dev/null +++ b/mcp_servers/freshdesk/tools/companies.py @@ -0,0 +1,241 @@ +from typing import Dict, List, Optional, Any +from .base import make_freshdesk_request, handle_freshdesk_error, remove_none_values +import logging + +logger = logging.getLogger(__name__) + +async def create_company( + name: str, + domains: Optional[List[str]] = None, + description: Optional[str] = None, + note: Optional[str] = None, + health_score: Optional[str] = None, + account_tier: Optional[str] = None, + renewal_date: Optional[str] = None, + industry: Optional[str] = None, + custom_fields: Optional[Dict[str, Any]] = None, + lookup_parameter: str = "display_id" +) -> Dict[str, Any]: + """ + Create a new company in Freshdesk. + + Args: + name: Name of the company (required, unique) + domains: List of company domains + description: Description of the company + note: Any specific note about the company + health_score: Health score of the company (e.g., "Happy", "At risk") + account_tier: Account tier (e.g., "Basic", "Premium") + renewal_date: Contract renewal date (YYYY-MM-DD) + industry: Industry the company serves in + custom_fields: Dictionary of custom field values + lookup_parameter: Either "display_id" or "primary_field_value" + + Returns: + Dictionary containing the created company details + """ + try: + + company_data = { + "name": name, + "domains": domains or [], + "description": description, + "note": note, + "health_score": health_score, + "account_tier": account_tier, + "renewal_date": renewal_date, + "industry": industry, + "custom_fields": custom_fields or {}, + "lookup_parameter": lookup_parameter + } + + company_data = remove_none_values(company_data) + + response = await make_freshdesk_request("POST", "/companies", data=company_data) + return response + + except Exception as e: + return handle_freshdesk_error(e, "create", "company") + +async def get_company_by_id(company_id: int) -> Dict[str, Any]: + """ + Retrieve a company by ID. + + Args: + company_id: ID of the company to retrieve + + Returns: + Dictionary containing company details + """ + try: + response = await make_freshdesk_request("GET", f"/companies/{company_id}") + return response + except Exception as e: + return handle_freshdesk_error(e, "retrieve", "company") + +async def list_companies( + updated_since: Optional[str] = None, + page: int = 1, + per_page: int = 30 +) -> Dict[str, Any]: + """ + List all companies with optional filtering. + + Args: + updated_since: Filter companies updated since this date (ISO 8601 format) + page: Page number (1-based) + per_page: Number of records per page (max 100) + + Returns: + Dictionary containing list of companies and pagination info + """ + try: + params = { + "updated_since": updated_since, + "page": page, + "per_page": min(per_page, 100) + } + + params = remove_none_values(params) + + response = await make_freshdesk_request( + "GET", + "/companies", + options={"query_params": params} + ) + return response + + except Exception as e: + return handle_freshdesk_error(e, "list", "companies") + +async def filter_companies( + query: str, + page: int = 1, + per_page: int = 30 +) -> Dict[str, Any]: + """ + Filter companies using a query string. + + Args: + query: Search query (supports domain, custom fields, etc.) + page: Page number (1-based) + per_page: Number of records per page + + Returns: + Dictionary containing search results and pagination info + """ + try: + params = { + "query": f'"{query}"', + "page": page, + "per_page": min(per_page, 30) + } + + response = await make_freshdesk_request( + "GET", + "/search/companies", + options={"query_params": params} + ) + return response + + except Exception as e: + return handle_freshdesk_error(e, "search", "companies") + +async def update_company( + company_id: int, + name: Optional[str] = None, + domains: Optional[List[str]] = None, + description: Optional[str] = None, + note: Optional[str] = None, + health_score: Optional[str] = None, + account_tier: Optional[str] = None, + renewal_date: Optional[str] = None, + industry: Optional[str] = None, + custom_fields: Optional[Dict[str, Any]] = None, + lookup_parameter: Optional[str] = None +) -> Dict[str, Any]: + """ + Update an existing company. + + Args: + company_id: ID of the company to update + name: New name for the company + domains: List of domains (will replace existing domains if provided) + description: New description + note: New note + health_score: Updated health score + account_tier: Updated account tier + renewal_date: New renewal date (YYYY-MM-DD) + industry: Updated industry + custom_fields: Dictionary of custom field values to update + lookup_parameter: Either "display_id" or "primary_field_value" + + Returns: + Dictionary containing the updated company details + """ + try: + update_data = { + "name": name, + "domains": domains, + "description": description, + "note": note, + "health_score": health_score, + "account_tier": account_tier, + "renewal_date": renewal_date, + "industry": industry, + "custom_fields": custom_fields, + "lookup_parameter": lookup_parameter + } + + update_data = remove_none_values(update_data) + + if not update_data: + raise ValueError("No fields to update") + + response = await make_freshdesk_request( + "PUT", + f"/companies/{company_id}", + data=update_data + ) + return response + + except Exception as e: + return handle_freshdesk_error(e, "update", "company") + +async def delete_company(company_id: int) -> Dict[str, Any]: + """ + Delete a company. + + Note: This only deletes the company record, not the associated contacts. + + Args: + company_id: ID of the company to delete + + Returns: + Dictionary indicating success or failure + """ + try: + await make_freshdesk_request("DELETE", f"/companies/{company_id}") + return {"success": True, "message": f"Company {company_id} deleted successfully"} + except Exception as e: + return handle_freshdesk_error(e, "delete", "company") + +async def search_companies_by_name(name: str) -> Dict[str, Any]: + """ + Search for companies by name (autocomplete). + + Args: + name: Search name (case-insensitive) + + Returns: + Dictionary containing matching companies + """ + try: + response = await make_freshdesk_request( + "GET", + "/companies/autocomplete", + options={"query_params": {"name": name}} + ) + return response + except Exception as e: + return handle_freshdesk_error(e, "search", "companies") \ No newline at end of file diff --git a/mcp_servers/freshdesk/tools/contacts.py b/mcp_servers/freshdesk/tools/contacts.py new file mode 100644 index 00000000..7907c2ac --- /dev/null +++ b/mcp_servers/freshdesk/tools/contacts.py @@ -0,0 +1,391 @@ +import logging +from typing import Any, Dict, List, Optional +import mimetypes +import os +from .base import make_freshdesk_request, handle_freshdesk_error, remove_none_values + +# Configure logging +logger = logging.getLogger(__name__) + +# Contact status constants +CONTACT_STATUS_VERIFIED = "verified" +CONTACT_STATUS_UNVERIFIED = "unverified" +CONTACT_STATUS_BLOCKED = "blocked" +CONTACT_STATUS_DELETED = "deleted" + +async def create_contact( + name: str, + email: Optional[str] = None, + phone: Optional[str] = None, + mobile: Optional[str] = None, + company_id: Optional[int] = None, + description: Optional[str] = None, + job_title: Optional[str] = None, + tags: Optional[List[str]] = None, + custom_fields: Optional[Dict[str, Any]] = None, + avatar_path: Optional[str] = None, + **kwargs +) -> Dict[str, Any]: + """ + Create a new contact in Freshdesk. + + Args: + name: Name of the contact + email: Primary email address + phone: Telephone number + mobile: Mobile number + company_id: ID of the primary company + description: Description of the contact + job_title: Job title + tags: List of tags + custom_fields: Dictionary of custom field values + avatar_path: Path to avatar image file + **kwargs: Additional contact fields + + Returns: + Dict containing the created contact data or error information + """ + if not any([email, phone, mobile]): + return {"error": "At least one of email, phone, or mobile is required"} + + contact_data = { + "name": name, + "email": email, + "phone": phone, + "mobile": mobile, + "company_id": company_id, + "description": description, + "job_title": job_title, + "tags": tags, + "custom_fields": custom_fields, + **kwargs + } + + contact_data = remove_none_values(contact_data) + + options = { } + + try: + if avatar_path: + options["files"] = handle_freshdesk_attachments("avatar", [{"type": "local", "content": avatar_path, "name": os.path.basename(avatar_path), "media_type": mimetypes.guess_type(avatar_path)[0]}]) + + return await make_freshdesk_request("POST", "/contacts", data=contact_data, options=options) + except Exception as e: + return handle_freshdesk_error(e, "create", "contact") + +async def get_contact_by_id(contact_id: int) -> Dict[str, Any]: + """ + Retrieve a contact by ID. + + Args: + contact_id: ID of the contact to retrieve + + Returns: + Dict containing the contact data or error information + """ + try: + return await make_freshdesk_request("GET", f"/contacts/{contact_id}") + except Exception as e: + return handle_freshdesk_error(e, "retrieve", "contact") + +async def list_contacts( + email: Optional[str] = None, + phone: Optional[str] = None, + mobile: Optional[str] = None, + company_id: Optional[int] = None, + state: Optional[str] = None, + updated_since: Optional[str] = None, + page: int = 1, + per_page: int = 30 +) -> Dict[str, Any]: + """ + List all contacts, optionally filtered by parameters. + + Args: + email: Filter by email + phone: Filter by phone number + mobile: Filter by mobile number + company_id: Filter by company ID + state: Filter by state (verified, unverified, blocked, deleted) + updated_since: Filter by last updated date (ISO 8601 format) + page: Page number (1-based) + per_page: Number of results per page (1-100) + + Returns: + Dict containing the list of contacts and pagination info + """ + params = { + "email": email, + "phone": phone, + "mobile": mobile, + "company_id": company_id, + "state": state, + "updated_since": updated_since, + "page": page, + "per_page": min(per_page, 100) + } + + params = remove_none_values(params) + + try: + return await make_freshdesk_request("GET", "/contacts", options={"query_params": params}) + except Exception as e: + return handle_freshdesk_error(e, "list", "contacts") + + +async def update_contact( + contact_id: int, + name: Optional[str] = None, + email: Optional[str] = None, + phone: Optional[str] = None, + mobile: Optional[str] = None, + company_id: Optional[int] = None, + description: Optional[str] = None, + job_title: Optional[str] = None, + tags: Optional[List[str]] = None, + custom_fields: Optional[Dict[str, Any]] = None, + avatar_path: Optional[str] = None, + **kwargs +) -> Dict[str, Any]: + """ + Update an existing contact. + + Args: + contact_id: ID of the contact to update + name: New name + email: New primary email + phone: New phone number + mobile: New mobile number + company_id: New company ID + description: New description + job_title: New job title + tags: Updated list of tags + custom_fields: Updated custom fields + avatar_path: Path to new avatar image file + **kwargs: Additional contact fields to update + + Returns: + Dict containing the updated contact data or error information + """ + contact_data = { + "name": name, + "email": email, + "phone": phone, + "mobile": mobile, + "company_id": company_id, + "description": description, + "job_title": job_title, + "tags": tags, + "custom_fields": custom_fields, + **kwargs + } + + contact_data = remove_none_values(contact_data) + + if not contact_data: + raise ValueError("No fields to update") + + options = {} + + try: + if avatar_path: + options["files"] = handle_freshdesk_attachments("avatar", [{"type": "local", "content": avatar_path, "name": os.path.basename(avatar_path), "media_type": mimetypes.guess_type(avatar_path)[0]}]) + + return await make_freshdesk_request("PUT", f"/contacts/{contact_id}", data=contact_data, options=options) + + except Exception as e: + return handle_freshdesk_error(e, "update", "contact") + + + +async def delete_contact( + contact_id: int, + hard_delete: bool = False, + force: bool = False +) -> Dict[str, Any]: + """ + Delete a contact. + + Args: + contact_id: ID of the contact to delete + hard_delete: If True, permanently delete the contact + force: If True, force hard delete even if not soft deleted first + + Returns: + Dict with success status or error information + """ + try: + if hard_delete: + return await make_freshdesk_request( + "DELETE", + f"/contacts/{contact_id}/hard_delete?force={str(force).lower()}" + ) + else: + return await make_freshdesk_request("DELETE", f"/contacts/{contact_id}") + except Exception as e: + return handle_freshdesk_error(e, "delete", "contact") + + +async def filter_contacts( + query: str, + page: int = 1, + updated_since: Optional[str] = None +) -> Dict[str, Any]: + """ + Filter contacts using a query string. + + Args: + query: Filter query string (e.g., "priority:3 AND status:2 OR priority:4") + page: Page number (1-based) + updated_since: Filter by last updated date (ISO 8601 format) + + Returns: + Dict containing search results and pagination info + """ + params = { + "query": f"'{query}'", + "page": page, + "updated_since": updated_since + } + + params = remove_none_values(params) + + try: + return await make_freshdesk_request("GET", "/search/contacts", options={"query_params": params}) + except Exception as e: + return handle_freshdesk_error(e, "filter", "contacts") + +async def search_contacts_by_name(name: str) -> Dict[str, Any]: + """ + Search contacts by name for autocomplete. + + Args: + name: Search term (contact name or part of name) + + Returns: + List of matching contacts with basic info + """ + try: + return await make_freshdesk_request("GET", "/contacts/autocomplete", options={"query_params": {"term": name}}) + except Exception as e: + return handle_freshdesk_error(e, "search", "contacts") + +async def make_contact_agent( + contact_id: int, + occasional: bool = False, + signature: Optional[str] = None, + ticket_scope: int = 1, + skill_ids: Optional[List[int]] = None, + group_ids: Optional[List[int]] = None, + role_ids: Optional[List[int]] = None, + agent_type: str = "support_agent", + focus_mode: bool = True +) -> Dict[str, Any]: + """ + Convert a contact to an agent. + + Args: + contact_id: ID of the contact to convert + occasional: Whether agent is occasional + signature: HTML signature for the agent + ticket_scope: Ticket permission level (1=Global, 2=Group, 3=Restricted) + skill_ids: List of skill IDs + group_ids: List of group IDs + role_ids: List of role IDs + agent_type: Type of agent (support_agent, field_agent, collaborator) + focus_mode: Whether focus mode is enabled + + Returns: + Dict containing the agent data or error information + """ + agent_data = { + "occasional": occasional, + "signature": signature, + "ticket_scope": ticket_scope, + "skill_ids": skill_ids or [], + "group_ids": group_ids or [], + "role_ids": role_ids or [], + "type": agent_type, + "focus_mode": focus_mode + } + + agent_data = remove_none_values(agent_data) + + try: + return await make_freshdesk_request( + "PUT", + f"/contacts/{contact_id}/make_agent", + data=agent_data + ) + except Exception as e: + return handle_freshdesk_error(e, "create", "contact_agent") + +async def restore_contact(contact_id: int) -> Dict[str, Any]: + """ + Restore a soft-deleted contact. + + Args: + contact_id: ID of the contact to restore + + Returns: + Dict with success status or error information + """ + try: + return await make_freshdesk_request( + "PUT", + f"/contacts/{contact_id}/restore" + ) + except Exception as e: + return handle_freshdesk_error(e, "restore", "contact") + +async def send_contact_invite(contact_id: int) -> Dict[str, Any]: + """ + Send an activation email to a contact. + + Args: + contact_id: ID of the contact to invite + + Returns: + Dict with success status or error information + """ + try: + return await make_freshdesk_request( + "POST", + f"/contacts/{contact_id}/send_invite" + ) + except Exception as e: + return handle_freshdesk_error(e, "send", "contact_invite") + +async def merge_contacts( + primary_contact_id: int, + secondary_contact_ids: List[int], + contact_data: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """ + Merge multiple contacts into a primary contact. + + Args: + primary_contact_id: ID of the contact to merge into + secondary_contact_ids: List of contact IDs to merge + contact_data: Optional dictionary of fields to update on the primary contact + + Returns: + Dict with success status or error information + """ + if not secondary_contact_ids: + raise ValueError("At least one secondary contact ID is required") + + merge_data = { + "primary_contact_id": primary_contact_id, + "secondary_contact_ids": secondary_contact_ids, + "contact": contact_data or {} + } + + try: + return await make_freshdesk_request( + "PUT", + f"/contacts/merge", + data=merge_data + ) + except Exception as e: + return handle_freshdesk_error(e, "merge", "contacts") \ No newline at end of file diff --git a/mcp_servers/freshdesk/tools/thread.py b/mcp_servers/freshdesk/tools/thread.py new file mode 100644 index 00000000..12db09c2 --- /dev/null +++ b/mcp_servers/freshdesk/tools/thread.py @@ -0,0 +1,360 @@ +import logging +from typing import Dict, List, Optional, Any, Union +from datetime import datetime +from .base import make_freshdesk_request, handle_freshdesk_error, remove_none_values, handle_freshdesk_attachments + +# Configure logging +logger = logging.getLogger(__name__) + + +async def create_thread( + thread_type: str, + parent_id: int, + parent_type: str = "ticket", + title: Optional[str] = None, + created_by: Optional[str] = None, + anchor_id: Optional[int] = None, + anchor_type: Optional[str] = None, + participants_emails: Optional[List[str]] = None, + participants_agents: Optional[List[str]] = None, + additional_info: Optional[Dict[str, Any]] = None, + **kwargs +) -> Dict[str, Any]: + """ + Create a new thread in Freshdesk. + + Args: + thread_type: Type of thread (forward, discussion, private) + parent_id: ID of the parent object (usually ticket) + parent_type: Type of parent object (default: ticket) + title: Title of the thread + created_by: ID of the user creating the thread + anchor_id: ID of the anchor object (e.g., conversation ID) + anchor_type: Type of anchor object (e.g., conversation) + participants_emails: List of email addresses of participants + participants_agents: List of agent IDs of participants + additional_info: Additional information like email_config_id + **kwargs: Additional thread fields + + Returns: + Dictionary containing the created thread details + """ + try: + thread_data = { + "type": thread_type, + "parent": { + "id": str(parent_id), + "type": parent_type + }, + "title": title, + "created_by": created_by, + "additional_info": additional_info, + **kwargs + } + + # Add anchor if provided + if anchor_id and anchor_type: + thread_data["anchor"] = { + "id": str(anchor_id), + "type": anchor_type + } + + # Add participants if provided + if participants_emails or participants_agents: + thread_data["participants"] = {} + if participants_emails: + thread_data["participants"]["emails"] = participants_emails + if participants_agents: + thread_data["participants"]["agents"] = participants_agents + + thread_data = remove_none_values(thread_data) + + logger.info(f"Creating thread with data: {thread_data}") + + response = await make_freshdesk_request( + method="POST", + endpoint="/collaboration/threads", + data=thread_data + ) + + logger.info(f"Successfully created thread") + return response + + except Exception as e: + return handle_freshdesk_error(e, "create", "thread") + + +async def get_thread_by_id(thread_id: int) -> Dict[str, Any]: + """ + Get a thread by its ID. + + Args: + thread_id: ID of the thread to retrieve + + Returns: + Dictionary containing the thread details + """ + try: + logger.info(f"Retrieving thread with ID: {thread_id}") + + response = await make_freshdesk_request( + method="GET", + endpoint=f"/collaboration/threads/{thread_id}" + ) + + logger.info(f"Successfully retrieved thread") + return response + + except Exception as e: + return handle_freshdesk_error(e, "get", "thread") + + +async def update_thread( + thread_id: int, + title: Optional[str] = None, + description: Optional[str] = None, + **kwargs +) -> Dict[str, Any]: + """ + Update a thread in Freshdesk. + + Args: + thread_id: ID of the thread to update + title: New title for the thread + description: New description for the thread + **kwargs: Additional fields to update + + Returns: + Dictionary containing the updated thread details + """ + try: + thread_data = { + "title": title, + "description": description, + **kwargs + } + + thread_data = remove_none_values(thread_data) + + logger.info(f"Updating thread {thread_id} with data: {thread_data}") + + response = await make_freshdesk_request( + method="PUT", + endpoint=f"/collaboration/threads/{thread_id}", + data=thread_data + ) + + logger.info(f"Successfully updated thread") + return response + + except Exception as e: + return handle_freshdesk_error(e, "update", "thread") + + +async def delete_thread(thread_id: int) -> Dict[str, Any]: + """ + Delete a thread from Freshdesk. + Note: This is an irreversible action! + + Args: + thread_id: ID of the thread to delete + + Returns: + Dictionary containing the deletion result + """ + try: + logger.info(f"Deleting thread with ID: {thread_id}") + + response = await make_freshdesk_request( + method="DELETE", + endpoint=f"/collaboration/threads/{thread_id}" + ) + + logger.info(f"Successfully deleted thread") + return {"success": True, "message": f"Thread {thread_id} deleted successfully"} + + except Exception as e: + return handle_freshdesk_error(e, "delete", "thread") + + +async def create_thread_message( + thread_id: int, + body: str, + body_text: Optional[str] = None, + attachment_ids: Optional[List[int]] = None, + inline_attachment_ids: Optional[List[int]] = None, + participants_email_to: Optional[List[str]] = None, + participants_email_cc: Optional[List[str]] = None, + participants_email_bcc: Optional[List[str]] = None, + participants_email_from: Optional[str] = None, + additional_info: Optional[Dict[str, Any]] = None, + full_message: Optional[str] = None, + full_message_text: Optional[str] = None, + **kwargs +) -> Dict[str, Any]: + """ + Create a new message for a thread. + + Args: + thread_id: ID of the thread to add message to + body: HTML content of the message + body_text: Plain text content of the message + attachment_ids: List of attachment IDs to include + inline_attachment_ids: List of inline attachment IDs + participants_email_to: List of email addresses to send to + participants_email_cc: List of email addresses to CC + participants_email_bcc: List of email addresses to BCC + participants_email_from: Email address to send from + additional_info: Additional information like has_quoted_text, email_subject + full_message: HTML content with original and quoted text + full_message_text: Plain text with quoted text + **kwargs: Additional message fields + + Returns: + Dictionary containing the created message details + """ + try: + message_data = { + "thread_id": str(thread_id), + "body": body, + "body_text": body_text, + "attachment_ids": attachment_ids, + "inline_attachment_ids": inline_attachment_ids, + "full_message": full_message, + "full_message_text": full_message_text, + "additional_info": additional_info, + **kwargs + } + + # Add participants if provided + if any([participants_email_to, participants_email_cc, participants_email_bcc, participants_email_from]): + message_data["participants"] = { + "email": {} + } + + if participants_email_to: + message_data["participants"]["email"]["to"] = participants_email_to + if participants_email_cc: + message_data["participants"]["email"]["cc"] = participants_email_cc + if participants_email_bcc: + message_data["participants"]["email"]["bcc"] = participants_email_bcc + if participants_email_from: + message_data["participants"]["email"]["from"] = participants_email_from + + message_data = remove_none_values(message_data) + + logger.info(f"Creating message for thread {thread_id} with data: {message_data}") + + response = await make_freshdesk_request( + method="POST", + endpoint="/collaboration/messages", + data=message_data + ) + + logger.info(f"Successfully created thread message") + return response + + except Exception as e: + return handle_freshdesk_error(e, "create", "thread message") + + +async def get_thread_message_by_id(message_id: int) -> Dict[str, Any]: + """ + Get a thread message by its ID. + + Args: + message_id: ID of the message to retrieve + + Returns: + Dictionary containing the message details + """ + try: + logger.info(f"Retrieving thread message with ID: {message_id}") + + response = await make_freshdesk_request( + method="GET", + endpoint=f"/collaboration/messages/{message_id}" + ) + + logger.info(f"Successfully retrieved thread message") + return response + + except Exception as e: + return handle_freshdesk_error(e, "get", "thread message") + + +async def update_thread_message( + message_id: int, + body: Optional[str] = None, + body_text: Optional[str] = None, + attachment_ids: Optional[List[int]] = None, + inline_attachment_ids: Optional[List[int]] = None, + additional_info: Optional[Dict[str, Any]] = None, + **kwargs +) -> Dict[str, Any]: + """ + Update a thread message. + + Args: + message_id: ID of the message to update + body: New HTML content of the message + body_text: New plain text content of the message + attachment_ids: New list of attachment IDs + inline_attachment_ids: New list of inline attachment IDs + additional_info: New additional information + **kwargs: Additional fields to update + + Returns: + Dictionary containing the updated message details + """ + try: + message_data = { + "body": body, + "body_text": body_text, + "attachment_ids": attachment_ids, + "inline_attachment_ids": inline_attachment_ids, + "additional_info": additional_info, + **kwargs + } + + message_data = remove_none_values(message_data) + + logger.info(f"Updating thread message {message_id} with data: {message_data}") + + response = await make_freshdesk_request( + method="PUT", + endpoint=f"/collaboration/messages/{message_id}", + data=message_data + ) + + logger.info(f"Successfully updated thread message") + return response + + except Exception as e: + return handle_freshdesk_error(e, "update", "thread message") + + +async def delete_thread_message(message_id: int) -> Dict[str, Any]: + """ + Delete a thread message. + Note: This is an irreversible action! + + Args: + message_id: ID of the message to delete + + Returns: + Dictionary containing the deletion result + """ + try: + logger.info(f"Deleting thread message with ID: {message_id}") + + response = await make_freshdesk_request( + method="DELETE", + endpoint=f"/collaboration/messages/{message_id}" + ) + + logger.info(f"Successfully deleted thread message") + return {"success": True, "message": f"Thread message {message_id} deleted successfully"} + + except Exception as e: + return handle_freshdesk_error(e, "delete", "thread message") diff --git a/mcp_servers/freshdesk/tools/tickets.py b/mcp_servers/freshdesk/tools/tickets.py new file mode 100644 index 00000000..c143cbf8 --- /dev/null +++ b/mcp_servers/freshdesk/tools/tickets.py @@ -0,0 +1,769 @@ +import logging +from typing import Dict, List, Optional, Any, Union +from datetime import datetime +from .base import make_freshdesk_request, handle_freshdesk_error, remove_none_values, handle_freshdesk_attachments + +# Configure logging +logger = logging.getLogger(__name__) + +# Ticket statuses +STATUS_OPEN = 2 +STATUS_PENDING = 3 +STATUS_RESOLVED = 4 +STATUS_CLOSED = 5 + +# Ticket priorities +PRIORITY_LOW = 1 +PRIORITY_MEDIUM = 2 +PRIORITY_HIGH = 3 +PRIORITY_URGENT = 4 + +# Ticket sources +SOURCE_EMAIL = 1 +SOURCE_PORTAL = 2 +SOURCE_PHONE = 3 +SOURCE_CHAT = 7 +SOURCE_FEEDBACK = 9 +SOURCE_OUTBOUND_EMAIL = 10 + + + + +async def create_ticket( + subject: str, + description: str, + email: str, + name: Optional[str] = None, + priority: int = PRIORITY_MEDIUM, + status: int = STATUS_OPEN, + source: int = SOURCE_PORTAL, + tags: Optional[List[str]] = None, + custom_fields: Optional[Dict[str, Any]] = None, + cc_emails: Optional[List[str]] = None, + responder_id: Optional[int] = None, + parent_id: Optional[int] = None, + **kwargs +) -> Dict[str, Any]: + """ + Create a new ticket in Freshdesk. + + Args: + subject: Subject of the ticket + description: HTML content of the ticket + email: Email address of the requester + name: Name of the requester (required if email not provided) + priority: Priority of the ticket (1-4) + status: Status of the ticket (2-5) + source: Source of the ticket (1-10) + tags: List of tags to associate with the ticket + custom_fields: Key-value pairs of custom fields + cc_emails: List of email addresses to CC + responder_id: ID of the responder + parent_id: ID of the parent ticket. If provided, the ticket will be created as a child of the parent ticket. + **kwargs: Additional ticket fields (e.g., due_by, fr_due_by, group_id, etc.) + + Returns: + Dictionary containing the created ticket details + """ + try: + attachments = kwargs.pop("attachments", None) + + ticket_data = { + "subject": subject, + "description": description, + "email": email, + "priority": priority, + "status": status, + "source": source, + "name": name, + "tags": tags, + "custom_fields": custom_fields, + "cc_emails": cc_emails, + "responder_id": responder_id, + "parent_id": parent_id, + **kwargs + } + + options = { } + + ticket_type = kwargs.pop("ticket_type", None) + + if ticket_type: + ticket_data["type"] = ticket_type + + ticket_data = remove_none_values(ticket_data) + + # Handle attachments if provided + if attachments: + options["files"] = handle_freshdesk_attachments("attachments[]", attachments) + + logger.info(f"Creating ticket with data: {ticket_data}") + response = await make_freshdesk_request("POST", "/tickets", data=ticket_data, options=options) + return response + + except Exception as e: + logger.error(f"Failed to create ticket: {str(e)}") + return handle_freshdesk_error(e, "create", "ticket") + + + +async def create_ticket_with_attachments( + subject: str, + description: str, + email: str, + name: Optional[str] = None, + priority: int = PRIORITY_MEDIUM, + status: int = STATUS_OPEN, + source: int = SOURCE_PORTAL, + tags: Optional[List[str]] = None, + custom_fields: Optional[Dict[str, Any]] = None, + cc_emails: Optional[List[str]] = None, + attachments: Optional[List[Dict[str, Any]]] = None, + responder_id: Optional[int] = None, + parent_id: Optional[int] = None, + **kwargs +): + try: + return await create_ticket( + subject=subject, + description=description, + email=email, + name=name, + priority=priority, + status=status, + source=source, + tags=tags, + custom_fields=custom_fields, + cc_emails=cc_emails, + responder_id=responder_id, + parent_id=parent_id, + attachments=attachments, + **kwargs + ) + except Exception as e: + return handle_freshdesk_error(e, "create", "ticket") + + +async def get_ticket_by_id(ticket_id: int, include: str = None) -> Dict[str, Any]: + """ + Retrieve a ticket by its ID. + + Args: + ticket_id: ID of the ticket to retrieve + include: Optional query parameter to include additional data (e.g., 'conversations', 'requester', 'company', 'stats') + + Returns: + Dictionary containing ticket details + """ + try: + endpoint = f"/tickets/{ticket_id}" + if include: + endpoint += f"?include={include}" + + response = await make_freshdesk_request("GET", endpoint) + return response + + except Exception as e: + return handle_freshdesk_error(e, "retrieve", "ticket") + + +async def update_ticket( + ticket_id: int, + subject: Optional[str] = None, + description: Optional[str] = None, + priority: Optional[int] = None, + status: Optional[int] = None, + tags: Optional[List[str]] = None, + custom_fields: Optional[Dict[str, Any]] = None, + attachments: Optional[List[Dict[str, Any]]] = None, + **kwargs +) -> Dict[str, Any]: + """ + Update an existing ticket. + + Args: + ticket_id: ID of the ticket to update + subject: New subject (if updating) + description: New description (if updating) + priority: New priority (1-4, if updating) + status: New status (2-5, if updating) + tags: New tags (if updating) + attachments: New attachments (if updating) + custom_fields: Updated custom fields (if any) + **kwargs: Additional fields to update + + Returns: + Dictionary containing updated ticket details + """ + try: + attachments = attachments or [] + + update_data = { + "subject": subject, + "description": description, + "priority": priority, + "status": status, + "tags": tags, + "custom_fields": custom_fields, + **kwargs + } + + update_data = remove_none_values(update_data) + + if not update_data: + raise ValueError("No fields to update") + + logger.info(f"Updating ticket {ticket_id} with data: {update_data}") + + options = {} + + if attachments: + options["files"] = handle_freshdesk_attachments("attachments[]", attachments) + + response = await make_freshdesk_request("PUT", f"/tickets/{ticket_id}", data=update_data, options=options) + return response + + except Exception as e: + return handle_freshdesk_error(e, "update", "ticket") + + +async def delete_ticket(ticket_id: int) -> Dict[str, Any]: + """ + Delete a ticket. + + Args: + ticket_id: ID of the ticket to delete + + Returns: + Dictionary indicating success or failure + """ + try: + endpoint = f"/tickets/{ticket_id}" + + await make_freshdesk_request("DELETE", endpoint) + return {"success": True, "message": f"Ticket {ticket_id} deleted successfully"} + + except Exception as e: + return handle_freshdesk_error(e, "delete", "ticket") + + + +async def delete_multiple_tickets(ticket_ids: List[int]) -> Dict[str, Any]: + """ + Delete multiple tickets. + + Args: + ticket_ids: List of IDs of tickets to delete + + Returns: + Dictionary indicating success or failure + """ + + try: + + if not ticket_ids: + return {"success": False, "error": "No ticket IDs provided"} + + endpoint = "/tickets/bulk_delete" + data = { + "bulk_action":{ + "ids": ticket_ids + } + } + await make_freshdesk_request("POST", endpoint, data=data) + return {"success": True, "message": f"Tickets {ticket_ids} deleted successfully"} + except Exception as e: + return handle_freshdesk_error(e, "delete", "tickets") + + +async def delete_attachment(attachment_id: int) -> Dict[str, Any]: + """ + Delete an attachment from a ticket. + + Args: + attachment_id: ID of the attachment to delete + + Returns: + Dictionary indicating success or failure + """ + try: + endpoint = f"/attachments/{attachment_id}" + + await make_freshdesk_request("DELETE", endpoint) + return {"success": True, "message": f"Attachment {attachment_id} deleted successfully"} + + except Exception as e: + return handle_freshdesk_error(e, "delete", "attachment") + + +async def list_tickets( + status: Optional[int] = None, + priority: Optional[int] = None, + requester_id: Optional[int] = None, + agent_id: Optional[int] = None, + email: Optional[str] = None, + group_id: Optional[int] = None, + company_id: Optional[int] = None, + ticket_type: Optional[str] = None, + updated_since: Optional[Union[str, datetime]] = None, + due_by: Optional[Union[str, datetime]] = None, + page: int = 1, + per_page: int = 30, + order_type: Optional[str] = "desc", + order_by: Optional[str] = "created_at", + include: Optional[str] = None, + **filters +) -> Dict[str, Any]: + """ + List tickets with optional filtering. + + Args: + status: Filter by status (2-5) + priority: Filter by priority (1-4) + requester_id: Filter by requester ID + agent_id: Filter by agent ID (ID of the agent to whom the ticket has been assigned) + email: Filter by email address + group_id: Filter by group ID + company_id: Filter by company ID + ticket_type: Filter by ticket type + updated_since: Only return tickets updated since this date (ISO format or datetime object) + due_by: Only return tickets due by this date (ISO format or datetime object) + page: Page number (for pagination) + per_page: Number of results per page (max 100) + order_type: Order type (asc or desc) + order_by: Order by (created_at, updated_at, priority, status) + include: Include additional data (stats, requester, description) + **filters: Additional filters as keyword arguments + + Returns: + Dictionary containing list of tickets and pagination info + """ + try: + params = { + "status": status, + "priority": priority, + "requester_id": requester_id, + "agent_id": agent_id, + "email": email, + "group_id": group_id, + "company_id": company_id, + "type": ticket_type, + "updated_since": updated_since, + "due_by": due_by, + "page": page, + "per_page": min(per_page, 100), + "order_type": order_type, + "order_by": order_by, + "include": include, + **filters + } + + if ticket_type is not None: + del params["ticket_type"] + + if updated_since is not None: + if isinstance(updated_since, datetime): + updated_since = updated_since.isoformat() + params["updated_since"] = updated_since + + if due_by is not None: + if isinstance(due_by, datetime): + due_by = due_by.isoformat() + params["due_by"] = due_by + + if (params.get("created_since")): + if isinstance(params.get("created_since"), datetime): + params["created_since"] = params.get("created_since").isoformat() + + params = remove_none_values(params) + + response = await make_freshdesk_request( + "GET", + "/tickets", + options={"query_params": params} + ) + + return response + + except Exception as e: + return handle_freshdesk_error(e, "list", "tickets") + + +async def add_note_to_ticket( + ticket_id: int, + body: str, + private: bool = False, + user_id: Optional[int] = None, + incoming: Optional[bool] = False, + notify_emails: Optional[List[str]] = None, + attachments: Optional[List[Dict[str, Any]]] = None + +) -> Dict[str, Any]: + """ + Add a note to a ticket. + + Args: + ticket_id: ID of the ticket + body: Content of the note + private: Whether the note is private + user_id: ID of the agent adding the note (defaults to authenticated user) + incoming: Whether the note is incoming + notify_emails: List of email addresses to notify + attachments: List of attachments to add to the note + + Returns: + Dictionary containing the created note details + """ + try: + note_data = { + "body": body, + "private": private, + "incoming": incoming, + "notify_emails": notify_emails, + "user_id": user_id, + } + + note_data = remove_none_values(note_data) + + options = {} + + if attachments: + options["files"] = handle_freshdesk_attachments("attachments[]", attachments) + + response = await make_freshdesk_request( + "POST", + f"/tickets/{ticket_id}/notes", + data=note_data, + options=options + ) + return response + + except Exception as e: + return handle_freshdesk_error(e, "add_note_to", "ticket") + + +async def reply_to_a_ticket( + ticket_id: int, + body: str, + user_id: Optional[int] = None, + cc_emails: Optional[List[str]] = None, + bcc_emails: Optional[List[str]] = None, + from_email: Optional[str] = None, + attachments: Optional[List[Dict[str, Any]]] = None +) -> Dict[str, Any]: + """ + Reply to a ticket. + + Args: + ticket_id: ID of the ticket + body: Content of the reply + user_id: ID of the agent replying (defaults to authenticated user) + cc_emails: List of email addresses to CC + bcc_emails: List of email addresses to BCC + attachments: List of attachments to add to the reply + from_email: Email address to use as the sender + + Returns: + Dictionary containing the created reply details + """ + try: + data = { + "body": body, + "user_id": user_id, + "cc_emails": cc_emails or [], + "bcc_emails": bcc_emails or [], + "from_email": from_email + } + + data = remove_none_values(data) + + options = {} + + if attachments: + options["files"] = handle_freshdesk_attachments("attachments[]", attachments) + + response = await make_freshdesk_request( + "POST", + f"/tickets/{ticket_id}/reply", + data=data, + options=options + ) + return response + + except Exception as e: + return handle_freshdesk_error(e, "reply_to", "ticket") + + +async def update_note( + note_id: int, + body: str, + attachments: Optional[List[Dict[str, Any]]] = None +) -> Dict[str, Any]: + """ + Update a note or reply to a ticket. + + Args: + note_id: ID of the note + body: Content of the note + attachments: List of attachments to add to the note + + Returns: + Dictionary containing the updated note details + """ + try: + note_data = { + "body": body, + } + + note_data = remove_none_values(note_data) + + options = {} + + if attachments: + options["files"] = handle_freshdesk_attachments("attachments[]", attachments) + + response = await make_freshdesk_request( + "PUT", + f"/conversations/{note_id}", + data=note_data, + options=options + ) + return response + + except Exception as e: + return handle_freshdesk_error(e, "update", "note") + + +async def delete_note( + note_id: int, +) -> Dict[str, Any]: + """ + Delete a note or reply to a ticket. + + Args: + note_id: ID of the note + + Returns: + Dictionary containing the deleted note details + """ + try: + response = await make_freshdesk_request( + "DELETE", + f"/conversations/{note_id}", + ) + return response + + except Exception as e: + return handle_freshdesk_error(e, "delete", "note") + + +async def filter_tickets( + query: str, + page: int = 1, + per_page: int = 30 +) -> Dict[str, Any]: + """ + Filter tickets using a query string. + + Args: + query: Filter query string (e.g., "priority:3 AND status:2 OR priority:4") + page: Page number (for pagination) + per_page: Number of results per page (max 30) + + Returns: + Dictionary containing search results and pagination info + """ + try: + params = { + "query": f'"{query}"', + "page": page, + "per_page": min(per_page, 30) + } + + response = await make_freshdesk_request( + "GET", + "/search/tickets", + options={"query_params": params} + ) + + return response + + except Exception as e: + return handle_freshdesk_error(e, "filter", "tickets") + + + +async def merge_tickets( + primary_ticket_id: int, + ticket_ids: List[int], + convert_recepients_to_cc: Optional[bool] = None, +) -> Dict[str, Any]: + """ + Merge two tickets. + + Args: + primary_ticket_id: ID of the ticket to be merged (will be closed) + ticket_ids: List of IDs of tickets to merge into the primary ticket + + Returns: + Dictionary indicating success or failure + """ + try: + + merge_data = { + "primary_id": primary_ticket_id, + "ticket_ids": ticket_ids, + "convert_recepients_to_cc": convert_recepients_to_cc + } + + merge_data = remove_none_values(merge_data) + + await make_freshdesk_request( + "PUT", + f"/tickets/merge", + data=merge_data + ) + return {"success": True, "message": f"Ticket {primary_ticket_id} merged into {ticket_ids}"} + except Exception as e: + return handle_freshdesk_error(e, "merge", "tickets") + + +async def restore_ticket(ticket_id: int) -> Dict[str, Any]: + """ + Restore a deleted ticket. + + Args: + ticket_id: ID of the ticket to restore + + Returns: + Dictionary containing the restored ticket details + """ + try: + response = await make_freshdesk_request("PUT", f"/tickets/{ticket_id}/restore") + return response + except Exception as e: + return handle_freshdesk_error(e, "restore", "ticket") + + +async def watch_ticket(ticket_id: int, user_id: Optional[int] = None) -> Dict[str, Any]: + """ + Watch a ticket. + + Args: + ticket_id: ID of the ticket to watch + user_id: ID of the user to watch the ticket (defaults to authenticated user) + + Returns: + Dictionary indicating success or failure + """ + try: + data = { + "user_id": user_id + } + + data = remove_none_values(data) + + await make_freshdesk_request( + "POST", + f"/tickets/{ticket_id}/watch", + data=data + ) + return {"success": True, "message": f"Now watching ticket {ticket_id}"} + except Exception as e: + return handle_freshdesk_error(e, "watch", "ticket") + + +async def unwatch_ticket(ticket_id: int) -> Dict[str, Any]: + """ + Unwatch a ticket. + + Args: + ticket_id: ID of the ticket to unwatch + Returns: + Dictionary indicating success or failure + """ + try: + await make_freshdesk_request("PUT", f"/tickets/{ticket_id}/unwatch") + return {"success": True, "message": f"Stopped watching ticket {ticket_id}"} + except Exception as e: + return handle_freshdesk_error(e, "unwatch", "ticket") + + +async def forward_ticket( + ticket_id: int, + to_emails: List[str], + cc_emails: Optional[List[str]] = None, + bcc_emails: Optional[List[str]] = None, + body: Optional[str] = None, + subject: Optional[str] = None, + **kwargs +) -> Dict[str, Any]: + """ + Forward a ticket to additional email addresses. + + Args: + ticket_id: ID of the ticket to forward + to_emails: List of email addresses to forward to + cc_emails: Optional list of CC email addresses + bcc_emails: Optional list of BCC email addresses + body: Custom message to include in the forward + subject: Custom subject for the forwarded email + **kwargs: Additional parameters for the forward + + Returns: + Dictionary indicating success or failure + """ + try: + data = { + "to_emails": to_emails, + "cc_emails": cc_emails, + "bcc_emails": bcc_emails, + "body": body, + "subject": subject, + **kwargs + } + + data = remove_none_values(data) + + await make_freshdesk_request( + "POST", + f"/tickets/{ticket_id}/forward", + data=data + ) + return {"success": True, "message": f"Ticket {ticket_id} forwarded successfully"} + except Exception as e: + return handle_freshdesk_error(e, "forward", "ticket") + + +async def get_archived_ticket(ticket_id: int) -> Dict[str, Any]: + """ + Retrieve an archived ticket by its ID. + + Args: + ticket_id: ID of the archived ticket to retrieve + + Returns: + Dictionary containing the archived ticket details + """ + try: + response = await make_freshdesk_request("GET", f"/tickets/archived/{ticket_id}") + return response + except Exception as e: + return handle_freshdesk_error(e, "retrieve", "archived ticket") + + +async def delete_archived_ticket(ticket_id: int) -> Dict[str, Any]: + """ + Permanently delete an archived ticket. + + Args: + ticket_id: ID of the archived ticket to delete + + Returns: + Dictionary indicating success or failure + """ + try: + await make_freshdesk_request("DELETE", f"/tickets/archived/{ticket_id}") + return {"success": True, "message": f"Archived ticket {ticket_id} deleted successfully"} + except Exception as e: + return handle_freshdesk_error(e, "delete", "archived ticket") diff --git a/mcp_servers/freshdesk/util.py b/mcp_servers/freshdesk/util.py new file mode 100644 index 00000000..bee01d6f --- /dev/null +++ b/mcp_servers/freshdesk/util.py @@ -0,0 +1,66 @@ +import time +import math +from typing import Dict +from datetime import datetime, timedelta +import asyncio + +class FreshdeskRateLimiter: + def __init__(self): + self.rate_limit_total = 50 + self.rate_limit_remaining = 50 + self.rate_limit_reset = 60 + self.last_request_time = 0 + self.retry_after = 0 + self.window_start = time.time() + self.requests_in_window = 0 + + def update_from_headers(self, headers: Dict[str, str]) -> None: + """Update rate limit state from response headers""" + now = time.time() + + # Reset window if we're in a new minute + if now - self.window_start >= 60: + self.window_start = now + self.requests_in_window = 0 + + if 'X-RateLimit-Total' in headers: + self.rate_limit_total = int(headers['X-RateLimit-Total']) + if 'X-RateLimit-Remaining' in headers: + self.rate_limit_remaining = int(headers['X-RateLimit-Remaining']) + if 'Retry-After' in headers: + self.retry_after = int(headers['Retry-After']) + + self.requests_in_window += 1 + self.last_request_time = now + + def get_sleep_time(self) -> float: + """Calculate how long to sleep before next request""" + now = time.time() + + # If we hit rate limit, use Retry-After + if self.retry_after > 0: + return self.retry_after + + # Calculate time since last request to enforce minimum delay + time_since_last = now - self.last_request_time + min_delay = 0.1 + sleep_time = max(0, min_delay - time_since_last) + + # If we're approaching the rate limit, let's slow down + if self.rate_limit_remaining < self.rate_limit_total * 0.1: + time_left_in_window = 60 - (now - self.window_start) + if time_left_in_window > 0: + # Distribute remaining requests over time left + sleep_time = max(sleep_time, time_left_in_window / (self.rate_limit_remaining + 1)) + + return sleep_time + + async def wait_for_capacity(self, required_requests: int = 1) -> None: + """Wait until we have capacity to make the requested number of API calls""" + if self.rate_limit_remaining < required_requests: + sleep_time = self.get_sleep_time() + if sleep_time > 0: + await asyncio.sleep(sleep_time) + + +rate_limiter = FreshdeskRateLimiter() \ No newline at end of file diff --git a/mcp_servers/github/.env.example b/mcp_servers/github/.env.example index a49e3c34..a65f7123 100644 --- a/mcp_servers/github/.env.example +++ b/mcp_servers/github/.env.example @@ -1,10 +1,2 @@ -# GitHub Authentication Token -# Required for authenticated API requests -GITHUB_AUTH_TOKEN=your_github_personal_access_token - -# Server Configuration -# Port where the SSE server will listen (defaults to 5000 if not set) PORT=5000 - -# Base URL for the server (defaults to http://localhost:PORT if not set) -BASE_URL=http://localhost:5000 \ No newline at end of file +AUTH_DATA={"access_token":"YOUR_ACCESS_TOKEN"} diff --git a/mcp_servers/github/Dockerfile b/mcp_servers/github/Dockerfile index 9dbe2d61..7b331859 100644 --- a/mcp_servers/github/Dockerfile +++ b/mcp_servers/github/Dockerfile @@ -13,7 +13,7 @@ RUN go mod download COPY mcp_servers/github/ ./ # Build the application -RUN CGO_ENABLED=0 GOOS=linux go build -o sse_server sse_server.go +RUN CGO_ENABLED=0 GOOS=linux go build -o server server.go # Create a minimal production image FROM alpine:3.19 @@ -24,10 +24,10 @@ WORKDIR /app RUN apk --no-cache add ca-certificates # Copy the binary from the builder stage -COPY --from=builder /app/sse_server . +COPY --from=builder /app/server . # Expose the default port EXPOSE 5000 # Run the server -CMD ["./sse_server"] \ No newline at end of file +CMD ["./server"] \ No newline at end of file diff --git a/mcp_servers/github/README.md b/mcp_servers/github/README.md index 0ed5e5fe..d14b518b 100644 --- a/mcp_servers/github/README.md +++ b/mcp_servers/github/README.md @@ -1,80 +1,79 @@ # GitHub MCP Server -[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) +A Model Context Protocol (MCP) server for GitHub integration. Interact with repositories, manage issues, search code, and more using GitHub's API through the standardized MCP interface. -This directory contains a Model Context Protocol (MCP) server designed to interact with the GitHub API. It is based on the official [github/github-mcp-server](https://github.com/github/github-mcp-server) implementation, adapted for use within the KlavisAI project. +## šŸš€ Quick Start - Run in 30 Seconds -This server allows MCP clients to leverage GitHub's API for tasks like retrieving repository contents, managing issues, searching code, and more, using the standardized MCP interface. +### 🌐 Using Hosted Service (Recommended for Production) -## Prerequisites +Get instant access to GitHub with our managed infrastructure - **no setup required**: -* **Docker:** (Recommended for running) Download and install Docker Desktop or Docker Engine. -* **Go:** Version 1.23 or later (Required for local development/running without Docker). -* **Git:** Required for cloning and version control. +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** -## Configuration +```bash +pip install klavis +# or +npm install klavis +``` -The server requires a GitHub Personal Access Token (PAT) for authenticated API access. +```python +from klavis import Klavis -1. **Create a `.env` file:** In the `mcp_servers/github` directory, create a file named `.env` by copying the example: - ```bash - cp mcp_servers/github/.env.example mcp_servers/github/.env - ``` -2. **Add your GitHub Token:** Edit the `.env` file and replace `your_github_personal_access_token` with your actual GitHub PAT. Ensure your token has the necessary permissions (e.g., `repo`, `read:user`) for the operations you intend to perform. - ```dotenv - # mcp_servers/github/.env - GITHUB_AUTH_TOKEN=ghp_YourActualGitHubTokenHere - ``` - * **Important:** Keep your `.env` file secure and do not commit it to version control. The `.gitignore` file in the project root should already be configured to ignore `.env` files. +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("GITHUB", "user123") +``` -3. **Optional Variables:** - * `PORT`: The port the server listens on. Defaults to `5000`. - * `BASE_URL`: The base URL exposed by the server. Defaults to `http://localhost:{PORT}`. You generally don't need to change these for local Docker usage. +### 🐳 Using Docker (For Self-Hosting) -## Running with Docker (Recommended) +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/github-mcp-server:latest -Using Docker provides a containerized and consistent environment. -1. **Build the Docker Image:** Navigate to the **root** of the `klavis` repository (the directory containing this `mcp_servers` folder) and run the build command: - ```bash - docker build -t klavis-github-mcp -f mcp_servers/github/Dockerfile . - ``` - * `-t klavis-github-mcp`: Tags the image with the name `klavis-github-mcp`. - * `-f mcp_servers/github/Dockerfile`: Specifies the path to the Dockerfile. - * `.`: Sets the build context to the current directory (the `klavis` root), allowing the Dockerfile to copy files from `mcp_servers/github/`. +# Run GitHub MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/github-mcp-server:latest -2. **Run the Docker Container:** - ```bash - docker run --rm -p 5000:5000 --env-file mcp_servers/github/.env klavis-github-mcp - ``` - * `--rm`: Automatically removes the container when it exits. - * `-p 5000:5000`: Maps port 5000 on your host machine to port 5000 inside the container (the default port the server listens on). - * `--env-file mcp_servers/github/.env`: Loads environment variables (specifically `GITHUB_AUTH_TOKEN`) from your `.env` file into the container. **Note:** The `.env` file itself is *not* copied into the image. - * `klavis-github-mcp`: The name of the image to run. +# Run GitHub MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"ghp_your_github_token_here"}' \ + ghcr.io/klavis-ai/github-mcp-server:latest +``` - The server should now be running and accessible at `http://localhost:5000`. +**OAuth Setup:** For OAuth authentication (recommended), use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys). This handles the complex OAuth flow automatically. -## Running Locally (Alternative) +**Manual Setup:** Alternatively, provide your GitHub Personal Access Token directly via `AUTH_DATA`. -If you prefer not to use Docker, you can run the server directly using Go. +## šŸ› ļø Available Tools -1. **Ensure Prerequisites:** Make sure you have Go 1.23+ installed. -2. **Navigate to Directory:** Open your terminal in the `mcp_servers/github` directory. -3. **Configure:** Create and configure the `.env` file as described in the "Configuration" section. -4. **Install Dependencies:** Download the necessary Go modules: - ```bash - go mod download - ``` -5. **Run the Server:** - ```bash - go run sse_server.go - ``` - The server will load the `GITHUB_AUTH_TOKEN` from the `.env` file in the current directory and start listening on port 5000 (or the port specified in `.env`). +- **Repository Management**: Get repository info, list files, read content +- **Issue Management**: Create, read, update issues and comments +- **Pull Requests**: Manage PRs, reviews, and merges +- **Search**: Search repositories, code, issues, and users +- **User Operations**: Get user profiles and organization info -## Usage +## šŸ“š Documentation & Support -Once the server is running (either via Docker or locally), you can interact with it using any MCP-compatible client (like `mcp-cli`). Point your client to the server's address (e.g., `http://localhost:5000`). +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | -If you configured the server using the `GITHUB_AUTH_TOKEN` environment variable (e.g., via `--env-file` in Docker or the `.env` file locally), the server will use this token for all requests. +## šŸ¤ Contributing -Alternatively, if the server was started *without* the `GITHUB_AUTH_TOKEN` environment variable set, clients must provide the token via the `x-auth-token` header with each request. \ No newline at end of file +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/github/github-mcp-server b/mcp_servers/github/github-mcp-server new file mode 100755 index 00000000..e4337f8b Binary files /dev/null and b/mcp_servers/github/github-mcp-server differ diff --git a/mcp_servers/github/go.mod b/mcp_servers/github/go.mod index 858690cd..36994ab4 100644 --- a/mcp_servers/github/go.mod +++ b/mcp_servers/github/go.mod @@ -3,64 +3,37 @@ module github.com/github/github-mcp-server go 1.23.7 require ( - github.com/docker/docker v28.0.4+incompatible - github.com/google/go-cmp v0.7.0 github.com/google/go-github/v69 v69.2.0 - github.com/mark3labs/mcp-go v0.18.0 - github.com/migueleliasweb/go-github-mock v1.1.0 + github.com/joho/godotenv v1.5.1 + github.com/mark3labs/mcp-go v0.31.0 + github.com/migueleliasweb/go-github-mock v1.3.0 github.com/sirupsen/logrus v1.9.3 - github.com/spf13/cobra v1.9.1 github.com/spf13/viper v1.20.1 github.com/stretchr/testify v1.10.0 ) require ( - github.com/Microsoft/go-winio v0.6.2 // indirect - github.com/containerd/log v0.1.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect - github.com/distribution/reference v0.6.0 // indirect - github.com/docker/go-connections v0.5.0 // indirect - github.com/docker/go-units v0.5.0 // indirect - github.com/felixge/httpsnoop v1.0.4 // indirect - github.com/fsnotify/fsnotify v1.8.0 // indirect - github.com/go-logr/logr v1.4.2 // indirect - github.com/go-logr/stdr v1.2.2 // indirect - github.com/go-viper/mapstructure/v2 v2.2.1 // indirect - github.com/gogo/protobuf v1.3.2 // indirect - github.com/google/go-github/v64 v64.0.0 // indirect + github.com/fsnotify/fsnotify v1.9.0 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect + github.com/google/go-github/v71 v71.0.0 // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/google/uuid v1.6.0 // indirect - github.com/gorilla/mux v1.8.0 // indirect - github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/moby/docker-image-spec v1.3.1 // indirect - github.com/moby/term v0.5.0 // indirect - github.com/morikuni/aec v1.0.0 // indirect - github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.1.1 // indirect - github.com/pelletier/go-toml/v2 v2.2.3 // indirect - github.com/pkg/errors v0.9.1 // indirect + github.com/gorilla/mux v1.8.1 // indirect + github.com/pelletier/go-toml/v2 v2.2.4 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/rogpeppe/go-internal v1.13.1 // indirect github.com/sagikazarmark/locafero v0.9.0 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/spf13/afero v1.14.0 // indirect - github.com/spf13/cast v1.7.1 // indirect + github.com/spf13/cast v1.8.0 // indirect github.com/spf13/pflag v1.0.6 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/yosida95/uritemplate/v3 v3.0.2 // indirect - go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect - go.opentelemetry.io/otel v1.35.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 // indirect - go.opentelemetry.io/otel/metric v1.35.0 // indirect - go.opentelemetry.io/otel/sdk v1.35.0 // indirect - go.opentelemetry.io/otel/trace v1.35.0 // indirect - go.opentelemetry.io/proto/otlp v1.5.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/sys v0.31.0 // indirect - golang.org/x/text v0.23.0 // indirect - golang.org/x/time v0.5.0 // indirect - google.golang.org/protobuf v1.36.5 // indirect + golang.org/x/sys v0.33.0 // indirect + golang.org/x/text v0.25.0 // indirect + golang.org/x/time v0.11.0 // indirect + gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gopkg.in/yaml.v3 v3.0.1 // indirect - gotest.tools/v3 v3.5.1 // indirect ) diff --git a/mcp_servers/github/go.sum b/mcp_servers/github/go.sum index 19d368de..4d9c7b44 100644 --- a/mcp_servers/github/go.sum +++ b/mcp_servers/github/go.sum @@ -1,86 +1,46 @@ -github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= -github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= -github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= -github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= -github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= -github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= -github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= -github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= -github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= -github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= -github.com/docker/docker v28.0.4+incompatible h1:JNNkBctYKurkw6FrHfKqY0nKIDf5nrbxjVBtS+cdcok= -github.com/docker/docker v28.0.4+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= -github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= -github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= -github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= -github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= -github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= -github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= -github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= -github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= -github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= -github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= -github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss= -github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= -github.com/google/go-github/v64 v64.0.0 h1:4G61sozmY3eiPAjjoOHponXDBONm+utovTKbyUb2Qdg= -github.com/google/go-github/v64 v64.0.0/go.mod h1:xB3vqMQNdHzilXBiO2I+M7iEFtHf+DP/omBOv6tQzVo= github.com/google/go-github/v69 v69.2.0 h1:wR+Wi/fN2zdUx9YxSmYE0ktiX9IAR/BeePzeaUUbEHE= github.com/google/go-github/v69 v69.2.0/go.mod h1:xne4jymxLR6Uj9b7J7PyTpkMYstEMMwGZa0Aehh1azM= +github.com/google/go-github/v71 v71.0.0 h1:Zi16OymGKZZMm8ZliffVVJ/Q9YZreDKONCr+WUd0Z30= +github.com/google/go-github/v71 v71.0.0/go.mod h1:URZXObp2BLlMjwu0O8g4y6VBneUj2bCHgnI8FfgZ51M= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= -github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.25.1 h1:VNqngBF40hVlDloBruUehVYC3ArSgIyScOAyMRqBxRg= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.25.1/go.mod h1:RBRO7fro65R6tjKzYgLAFo0t1QEXY1Dp+i/bvpRiqiQ= -github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= -github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/mark3labs/mcp-go v0.18.0 h1:YuhgIVjNlTG2ZOwmrkORWyPTp0dz1opPEqvsPtySXao= -github.com/mark3labs/mcp-go v0.18.0/go.mod h1:KmJndYv7GIgcPVwEKJjNcbhVQ+hJGJhrCCB/9xITzpE= -github.com/migueleliasweb/go-github-mock v1.1.0 h1:GKaOBPsrPGkAKgtfuWY8MclS1xR6MInkx1SexJucMwE= -github.com/migueleliasweb/go-github-mock v1.1.0/go.mod h1:pYe/XlGs4BGMfRY4vmeixVsODHnVDDhJ9zoi0qzSMHc= -github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= -github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= -github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= -github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= -github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= -github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= -github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= -github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= -github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= -github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= -github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/mark3labs/mcp-go v0.31.0 h1:4UxSV8aM770OPmTvaVe/b1rA2oZAjBMhGBfUgOGut+4= +github.com/mark3labs/mcp-go v0.31.0/go.mod h1:rXqOudj/djTORU/ThxYx8fqEVj/5pvTuuebQ2RC7uk4= +github.com/migueleliasweb/go-github-mock v1.3.0 h1:2sVP9JEMB2ubQw1IKto3/fzF51oFC6eVWOOFDgQoq88= +github.com/migueleliasweb/go-github-mock v1.3.0/go.mod h1:ipQhV8fTcj/G6m7BKzin08GaJ/3B5/SonRAkgrk0zCY= +github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= +github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= -github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sagikazarmark/locafero v0.9.0 h1:GbgQGNtTrEmddYDSAH9QLRyfAHY12md+8YFTqyMTC9k= github.com/sagikazarmark/locafero v0.9.0/go.mod h1:UBUyz37V+EdMS3hDF3QWIiVr/2dPrx49OMO0Bn0hJqk= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= @@ -89,10 +49,8 @@ github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9yS github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/spf13/afero v1.14.0 h1:9tH6MapGnn/j0eb0yIXiLjERO8RB6xIVZRDCX7PtqWA= github.com/spf13/afero v1.14.0/go.mod h1:acJQ8t0ohCGuMN3O+Pv0V0hgMxNYDlvdk+VTfyZmbYo= -github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y= -github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= -github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= -github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= +github.com/spf13/cast v1.8.0 h1:gEN9K4b8Xws4EX0+a0reLmhq8moKn7ntRlQYgjPeCDk= +github.com/spf13/cast v1.8.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4= @@ -105,75 +63,19 @@ github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8 github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4= github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= -go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= -go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= -go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0 h1:1fTNlAIJZGWLP5FVu0fikVry1IsiUnXjf7QFvoNN3Xw= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0/go.mod h1:zjPK58DtkqQFn+YUMbx0M2XV3QgKU0gS9LeGohREyK4= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU= -go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= -go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= -go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY= -go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg= -go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= -go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= -go.opentelemetry.io/proto/otlp v1.5.0 h1:xJvq7gMzB31/d406fB8U5CBdyQGw4P399D1aQWU/3i4= -go.opentelemetry.io/proto/otlp v1.5.0/go.mod h1:keN8WnHxOy8PG0rQZjJJ5A2ebUoafqWp0eVQ4yIXvJ4= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I= -golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= -golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= -golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= -golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= -golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= +golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= +golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= +golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/genproto/googleapis/api v0.0.0-20250102185135-69823020774d h1:H8tOf8XM88HvKqLTxe755haY6r1fqqzLbEnfrmLXlSA= -google.golang.org/genproto/googleapis/api v0.0.0-20250102185135-69823020774d/go.mod h1:2v7Z7gP2ZUOGsaFyxATQSRoBnKygqVq2Cwnvom7QiqY= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d h1:xJJRGY7TJcvIlpSrN3K6LAWgNFUILlO+OMAqtg9aqnw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d/go.mod h1:3ENsm/5D1mzDyhpzeRi1NR784I0BcofWBoSc5QqqMK4= -google.golang.org/grpc v1.69.2 h1:U3S9QEtbXC0bYNvRtcoklF3xGtLViumSYxWykJS+7AU= -google.golang.org/grpc v1.69.2/go.mod h1:vyjdE6jLBI76dgpDojsFGNaHlxdjXN9ghpnd2o7JGZ4= -google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM= -google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= -gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= diff --git a/mcp_servers/github/pkg/github/code_scanning.go b/mcp_servers/github/pkg/github/code_scanning.go index 4fc029bf..2a7f7258 100644 --- a/mcp_servers/github/pkg/github/code_scanning.go +++ b/mcp_servers/github/pkg/github/code_scanning.go @@ -14,7 +14,7 @@ import ( ) func GetCodeScanningAlert(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("get_code_scanning_alert", + return mcp.NewTool("github_get_code_scanning_alert", mcp.WithDescription(t("TOOL_GET_CODE_SCANNING_ALERT_DESCRIPTION", "Get details of a specific code scanning alert in a GitHub repository.")), mcp.WithString("owner", mcp.Required(), @@ -72,7 +72,7 @@ func GetCodeScanningAlert(getClient GetClientFn, t translations.TranslationHelpe } func ListCodeScanningAlerts(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("list_code_scanning_alerts", + return mcp.NewTool("github_list_code_scanning_alerts", mcp.WithDescription(t("TOOL_LIST_CODE_SCANNING_ALERTS_DESCRIPTION", "List code scanning alerts in a GitHub repository.")), mcp.WithString("owner", mcp.Required(), diff --git a/mcp_servers/github/pkg/github/issues.go b/mcp_servers/github/pkg/github/issues.go index 16c34141..3b8e517d 100644 --- a/mcp_servers/github/pkg/github/issues.go +++ b/mcp_servers/github/pkg/github/issues.go @@ -16,7 +16,7 @@ import ( // GetIssue creates a tool to get details of a specific issue in a GitHub repository. func GetIssue(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("get_issue", + return mcp.NewTool("github_get_issue", mcp.WithDescription(t("TOOL_GET_ISSUE_DESCRIPTION", "Get details of a specific issue in a GitHub repository")), mcp.WithString("owner", mcp.Required(), @@ -74,7 +74,7 @@ func GetIssue(getClient GetClientFn, t translations.TranslationHelperFunc) (tool // AddIssueComment creates a tool to add a comment to an issue. func AddIssueComment(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("add_issue_comment", + return mcp.NewTool("github_add_issue_comment", mcp.WithDescription(t("TOOL_ADD_ISSUE_COMMENT_DESCRIPTION", "Add a comment to an existing issue")), mcp.WithString("owner", mcp.Required(), @@ -144,7 +144,7 @@ func AddIssueComment(getClient GetClientFn, t translations.TranslationHelperFunc // SearchIssues creates a tool to search for issues and pull requests. func SearchIssues(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("search_issues", + return mcp.NewTool("github_search_issues", mcp.WithDescription(t("TOOL_SEARCH_ISSUES_DESCRIPTION", "Search for issues and pull requests across GitHub repositories")), mcp.WithString("q", mcp.Required(), @@ -228,7 +228,7 @@ func SearchIssues(getClient GetClientFn, t translations.TranslationHelperFunc) ( // CreateIssue creates a tool to create a new issue in a GitHub repository. func CreateIssue(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("create_issue", + return mcp.NewTool("github_create_issue", mcp.WithDescription(t("TOOL_CREATE_ISSUE_DESCRIPTION", "Create a new issue in a GitHub repository")), mcp.WithString("owner", mcp.Required(), @@ -346,7 +346,7 @@ func CreateIssue(getClient GetClientFn, t translations.TranslationHelperFunc) (t // ListIssues creates a tool to list and filter repository issues func ListIssues(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("list_issues", + return mcp.NewTool("github_list_issues", mcp.WithDescription(t("TOOL_LIST_ISSUES_DESCRIPTION", "List issues in a GitHub repository with filtering options")), mcp.WithString("owner", mcp.Required(), @@ -427,11 +427,11 @@ func ListIssues(getClient GetClientFn, t translations.TranslationHelperFunc) (to opts.Since = timestamp } - if page, ok := request.Params.Arguments["page"].(float64); ok { + if page, ok := request.GetArguments()["page"].(float64); ok { opts.Page = int(page) } - if perPage, ok := request.Params.Arguments["perPage"].(float64); ok { + if perPage, ok := request.GetArguments()["perPage"].(float64); ok { opts.PerPage = int(perPage) } @@ -464,7 +464,7 @@ func ListIssues(getClient GetClientFn, t translations.TranslationHelperFunc) (to // UpdateIssue creates a tool to update an existing issue in a GitHub repository. func UpdateIssue(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("update_issue", + return mcp.NewTool("github_update_issue", mcp.WithDescription(t("TOOL_UPDATE_ISSUE_DESCRIPTION", "Update an existing issue in a GitHub repository")), mcp.WithString("owner", mcp.Required(), @@ -606,7 +606,7 @@ func UpdateIssue(getClient GetClientFn, t translations.TranslationHelperFunc) (t // GetIssueComments creates a tool to get comments for a GitHub issue. func GetIssueComments(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("get_issue_comments", + return mcp.NewTool("github_get_issue_comments", mcp.WithDescription(t("TOOL_GET_ISSUE_COMMENTS_DESCRIPTION", "Get comments for a GitHub issue")), mcp.WithString("owner", mcp.Required(), diff --git a/mcp_servers/github/pkg/github/pullrequests.go b/mcp_servers/github/pkg/github/pullrequests.go index 14aeb918..77b34661 100644 --- a/mcp_servers/github/pkg/github/pullrequests.go +++ b/mcp_servers/github/pkg/github/pullrequests.go @@ -15,7 +15,7 @@ import ( // GetPullRequest creates a tool to get details of a specific pull request. func GetPullRequest(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("get_pull_request", + return mcp.NewTool("github_get_pull_request", mcp.WithDescription(t("TOOL_GET_PULL_REQUEST_DESCRIPTION", "Get details of a specific pull request")), mcp.WithString("owner", mcp.Required(), @@ -73,7 +73,7 @@ func GetPullRequest(getClient GetClientFn, t translations.TranslationHelperFunc) // UpdatePullRequest creates a tool to update an existing pull request. func UpdatePullRequest(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("update_pull_request", + return mcp.NewTool("github_update_pull_request", mcp.WithDescription(t("TOOL_UPDATE_PULL_REQUEST_DESCRIPTION", "Update an existing pull request in a GitHub repository")), mcp.WithString("owner", mcp.Required(), @@ -190,7 +190,7 @@ func UpdatePullRequest(getClient GetClientFn, t translations.TranslationHelperFu // ListPullRequests creates a tool to list and filter repository pull requests. func ListPullRequests(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("list_pull_requests", + return mcp.NewTool("github_list_pull_requests", mcp.WithDescription(t("TOOL_LIST_PULL_REQUESTS_DESCRIPTION", "List and filter repository pull requests")), mcp.WithString("owner", mcp.Required(), @@ -292,7 +292,7 @@ func ListPullRequests(getClient GetClientFn, t translations.TranslationHelperFun // MergePullRequest creates a tool to merge a pull request. func MergePullRequest(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("merge_pull_request", + return mcp.NewTool("github_merge_pull_request", mcp.WithDescription(t("TOOL_MERGE_PULL_REQUEST_DESCRIPTION", "Merge a pull request")), mcp.WithString("owner", mcp.Required(), @@ -376,7 +376,7 @@ func MergePullRequest(getClient GetClientFn, t translations.TranslationHelperFun // GetPullRequestFiles creates a tool to get the list of files changed in a pull request. func GetPullRequestFiles(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("get_pull_request_files", + return mcp.NewTool("github_get_pull_request_files", mcp.WithDescription(t("TOOL_GET_PULL_REQUEST_FILES_DESCRIPTION", "Get the list of files changed in a pull request")), mcp.WithString("owner", mcp.Required(), @@ -435,7 +435,7 @@ func GetPullRequestFiles(getClient GetClientFn, t translations.TranslationHelper // GetPullRequestStatus creates a tool to get the combined status of all status checks for a pull request. func GetPullRequestStatus(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("get_pull_request_status", + return mcp.NewTool("github_get_pull_request_status", mcp.WithDescription(t("TOOL_GET_PULL_REQUEST_STATUS_DESCRIPTION", "Get the combined status of all status checks for a pull request")), mcp.WithString("owner", mcp.Required(), @@ -508,7 +508,7 @@ func GetPullRequestStatus(getClient GetClientFn, t translations.TranslationHelpe // UpdatePullRequestBranch creates a tool to update a pull request branch with the latest changes from the base branch. func UpdatePullRequestBranch(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("update_pull_request_branch", + return mcp.NewTool("github_update_pull_request_branch", mcp.WithDescription(t("TOOL_UPDATE_PULL_REQUEST_BRANCH_DESCRIPTION", "Update a pull request branch with the latest changes from the base branch")), mcp.WithString("owner", mcp.Required(), @@ -582,7 +582,7 @@ func UpdatePullRequestBranch(getClient GetClientFn, t translations.TranslationHe // GetPullRequestComments creates a tool to get the review comments on a pull request. func GetPullRequestComments(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("get_pull_request_comments", + return mcp.NewTool("github_get_pull_request_comments", mcp.WithDescription(t("TOOL_GET_PULL_REQUEST_COMMENTS_DESCRIPTION", "Get the review comments on a pull request")), mcp.WithString("owner", mcp.Required(), @@ -646,7 +646,7 @@ func GetPullRequestComments(getClient GetClientFn, t translations.TranslationHel // GetPullRequestReviews creates a tool to get the reviews on a pull request. func GetPullRequestReviews(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("get_pull_request_reviews", + return mcp.NewTool("github_get_pull_request_reviews", mcp.WithDescription(t("TOOL_GET_PULL_REQUEST_REVIEWS_DESCRIPTION", "Get the reviews on a pull request")), mcp.WithString("owner", mcp.Required(), @@ -704,7 +704,7 @@ func GetPullRequestReviews(getClient GetClientFn, t translations.TranslationHelp // CreatePullRequestReview creates a tool to submit a review on a pull request. func CreatePullRequestReview(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("create_pull_request_review", + return mcp.NewTool("github_create_pull_request_review", mcp.WithDescription(t("TOOL_CREATE_PULL_REQUEST_REVIEW_DESCRIPTION", "Create a review on a pull request")), mcp.WithString("owner", mcp.Required(), @@ -811,7 +811,7 @@ func CreatePullRequestReview(getClient GetClientFn, t translations.TranslationHe } // Add comments if provided - if commentsObj, ok := request.Params.Arguments["comments"].([]interface{}); ok && len(commentsObj) > 0 { + if commentsObj, ok := request.GetArguments()["comments"].([]interface{}); ok && len(commentsObj) > 0 { comments := []*github.DraftReviewComment{} for _, c := range commentsObj { @@ -900,7 +900,7 @@ func CreatePullRequestReview(getClient GetClientFn, t translations.TranslationHe // CreatePullRequest creates a tool to create a new pull request. func CreatePullRequest(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("create_pull_request", + return mcp.NewTool("github_create_pull_request", mcp.WithDescription(t("TOOL_CREATE_PULL_REQUEST_DESCRIPTION", "Create a new pull request in a GitHub repository")), mcp.WithString("owner", mcp.Required(), diff --git a/mcp_servers/github/pkg/github/repositories.go b/mcp_servers/github/pkg/github/repositories.go index f52c0341..e752a9e6 100644 --- a/mcp_servers/github/pkg/github/repositories.go +++ b/mcp_servers/github/pkg/github/repositories.go @@ -6,6 +6,8 @@ import ( "fmt" "io" "net/http" + "strings" + "time" "github.com/github/github-mcp-server/pkg/translations" "github.com/google/go-github/v69/github" @@ -15,7 +17,7 @@ import ( // ListCommits creates a tool to get commits of a branch in a repository. func ListCommits(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("list_commits", + return mcp.NewTool("github_list_commits", mcp.WithDescription(t("TOOL_LIST_COMMITS_DESCRIPTION", "Get list of commits of a branch in a GitHub repository")), mcp.WithString("owner", mcp.Required(), @@ -85,7 +87,7 @@ func ListCommits(getClient GetClientFn, t translations.TranslationHelperFunc) (t // CreateOrUpdateFile creates a tool to create or update a file in a GitHub repository. func CreateOrUpdateFile(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("create_or_update_file", + return mcp.NewTool("github_create_or_update_file", mcp.WithDescription(t("TOOL_CREATE_OR_UPDATE_FILE_DESCRIPTION", "Create or update a single file in a GitHub repository")), mcp.WithString("owner", mcp.Required(), @@ -190,7 +192,7 @@ func CreateOrUpdateFile(getClient GetClientFn, t translations.TranslationHelperF // CreateRepository creates a tool to create a new GitHub repository. func CreateRepository(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("create_repository", + return mcp.NewTool("github_create_repository", mcp.WithDescription(t("TOOL_CREATE_REPOSITORY_DESCRIPTION", "Create a new GitHub repository in your account")), mcp.WithString("name", mcp.Required(), @@ -260,7 +262,7 @@ func CreateRepository(getClient GetClientFn, t translations.TranslationHelperFun // GetFileContents creates a tool to get the contents of a file or directory from a GitHub repository. func GetFileContents(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("get_file_contents", + return mcp.NewTool("github_get_file_contents", mcp.WithDescription(t("TOOL_GET_FILE_CONTENTS_DESCRIPTION", "Get the contents of a file or directory from a GitHub repository")), mcp.WithString("owner", mcp.Required(), @@ -333,7 +335,7 @@ func GetFileContents(getClient GetClientFn, t translations.TranslationHelperFunc // ForkRepository creates a tool to fork a repository. func ForkRepository(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("fork_repository", + return mcp.NewTool("github_fork_repository", mcp.WithDescription(t("TOOL_FORK_REPOSITORY_DESCRIPTION", "Fork a GitHub repository to your account or specified organization")), mcp.WithString("owner", mcp.Required(), @@ -400,7 +402,7 @@ func ForkRepository(getClient GetClientFn, t translations.TranslationHelperFunc) // CreateBranch creates a tool to create a new branch. func CreateBranch(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("create_branch", + return mcp.NewTool("github_create_branch", mcp.WithDescription(t("TOOL_CREATE_BRANCH_DESCRIPTION", "Create a new branch in a GitHub repository")), mcp.WithString("owner", mcp.Required(), @@ -485,7 +487,7 @@ func CreateBranch(getClient GetClientFn, t translations.TranslationHelperFunc) ( // PushFiles creates a tool to push multiple files in a single commit to a GitHub repository. func PushFiles(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("push_files", + return mcp.NewTool("github_push_files", mcp.WithDescription(t("TOOL_PUSH_FILES_DESCRIPTION", "Push multiple files to a GitHub repository in a single commit")), mcp.WithString("owner", mcp.Required(), @@ -543,7 +545,7 @@ func PushFiles(getClient GetClientFn, t translations.TranslationHelperFunc) (too } // Parse files parameter - this should be an array of objects with path and content - filesObj, ok := request.Params.Arguments["files"].([]interface{}) + filesObj, ok := request.GetArguments()["files"].([]interface{}) if !ok { return mcp.NewToolResultError("files parameter must be an array of objects with path and content"), nil } @@ -630,3 +632,130 @@ func PushFiles(getClient GetClientFn, t translations.TranslationHelperFunc) (too return mcp.NewToolResultText(string(r)), nil } } + +// ListStargazers creates a tool to list users who have starred a GitHub repository. note: we use the graphql api and prompt to get accurate result! +func ListStargazers(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { + return mcp.NewTool("github_list_recent_stargazers", + mcp.WithDescription(t("TOOL_LIST_RECENT_STARGAZERS_DESCRIPTION", "Get a comprehensive list of users who have recently starred a specified GitHub repository, Ensure that the list includes every user without any omissions.")), + mcp.WithString("owner", + mcp.Required(), + mcp.Description("Repository owner (username or organization)"), + ), + mcp.WithString("repo", + mcp.Required(), + mcp.Description("Repository name"), + ), + WithPagination(), + ), + func(ctx context.Context, request mcp.CallToolRequest) (*mcp.CallToolResult, error) { + owner, err := requiredParam[string](request, "owner") + if err != nil { + return mcp.NewToolResultError(err.Error()), nil + } + repo, err := requiredParam[string](request, "repo") + if err != nil { + return mcp.NewToolResultError(err.Error()), nil + } + pagination, err := OptionalPaginationParams(request) + if err != nil { + return mcp.NewToolResultError(err.Error()), nil + } + + client, err := getClient(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get GitHub client: %w", err) + } + + // GraphQL query to fetch stargazers sorted by starred date + query := ` + query($owner: String!, $name: String!, $first: Int!) { + repository(owner: $owner, name: $name) { + stargazers(first: $first, orderBy: {field: STARRED_AT, direction: DESC}) { + edges { + starredAt + node { + login + } + } + } + } + } + ` + + variables := map[string]interface{}{ + "owner": owner, + "name": repo, + "first": pagination.perPage, + } + + req, err := client.NewRequest("POST", "graphql", map[string]interface{}{ + "query": query, + "variables": variables, + }) + if err != nil { + return nil, fmt.Errorf("failed to create GraphQL request: %w", err) + } + + type GraphQLResponse struct { + Data struct { + Repository struct { + Stargazers struct { + Edges []struct { + StarredAt string `json:"starredAt"` + Node struct { + Login string `json:"login"` + } `json:"node"` + } `json:"edges"` + } `json:"stargazers"` + } `json:"repository"` + } `json:"data"` + Errors []struct { + Message string `json:"message"` + } `json:"errors,omitempty"` + } + + var response GraphQLResponse + resp, err := client.Do(ctx, req, &response) + if err != nil { + return nil, fmt.Errorf("failed to execute GraphQL request: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != 200 { + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response body: %w", err) + } + return mcp.NewToolResultError(fmt.Sprintf("failed to list stargazers: %s", string(body))), nil + } + + if len(response.Errors) > 0 { + errMsgs := make([]string, len(response.Errors)) + for i, e := range response.Errors { + errMsgs[i] = e.Message + } + return mcp.NewToolResultError(fmt.Sprintf("GraphQL errors: %s", strings.Join(errMsgs, "; "))), nil + } + + type Stargazer struct { + Login string `json:"login"` + StarredAt time.Time `json:"starred_at,omitempty"` + } + + stargazers := make([]Stargazer, 0, len(response.Data.Repository.Stargazers.Edges)) + for _, edge := range response.Data.Repository.Stargazers.Edges { + starredAt, _ := time.Parse(time.RFC3339, edge.StarredAt) + stargazers = append(stargazers, Stargazer{ + Login: edge.Node.Login, + StarredAt: starredAt, + }) + } + + r, err := json.Marshal(stargazers) + if err != nil { + return nil, fmt.Errorf("failed to marshal response: %w", err) + } + + return mcp.NewToolResultText(string(r)), nil + } +} diff --git a/mcp_servers/github/pkg/github/repository_resource.go b/mcp_servers/github/pkg/github/repository_resource.go index 949157f5..4d951609 100644 --- a/mcp_servers/github/pkg/github/repository_resource.go +++ b/mcp_servers/github/pkg/github/repository_resource.go @@ -67,6 +67,7 @@ func RepositoryResourceContentsHandler(getClient GetClientFn) func(ctx context.C return func(ctx context.Context, request mcp.ReadResourceRequest) ([]mcp.ResourceContents, error) { // the matcher will give []string with one element // https://github.com/mark3labs/mcp-go/pull/54 + o, ok := request.Params.Arguments["owner"].([]string) if !ok || len(o) == 0 { return nil, errors.New("owner is required") diff --git a/mcp_servers/github/pkg/github/search.go b/mcp_servers/github/pkg/github/search.go index 75810e24..06cbd08e 100644 --- a/mcp_servers/github/pkg/github/search.go +++ b/mcp_servers/github/pkg/github/search.go @@ -14,7 +14,7 @@ import ( // SearchRepositories creates a tool to search for GitHub repositories. func SearchRepositories(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("search_repositories", + return mcp.NewTool("github_search_repositories", mcp.WithDescription(t("TOOL_SEARCH_REPOSITORIES_DESCRIPTION", "Search for GitHub repositories")), mcp.WithString("query", mcp.Required(), @@ -68,7 +68,7 @@ func SearchRepositories(getClient GetClientFn, t translations.TranslationHelperF // SearchCode creates a tool to search for code across GitHub repositories. func SearchCode(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("search_code", + return mcp.NewTool("github_search_code", mcp.WithDescription(t("TOOL_SEARCH_CODE_DESCRIPTION", "Search for code across GitHub repositories")), mcp.WithString("q", mcp.Required(), @@ -140,7 +140,7 @@ func SearchCode(getClient GetClientFn, t translations.TranslationHelperFunc) (to // SearchUsers creates a tool to search for GitHub users. func SearchUsers(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("search_users", + return mcp.NewTool("github_search_users", mcp.WithDescription(t("TOOL_SEARCH_USERS_DESCRIPTION", "Search for GitHub users")), mcp.WithString("q", mcp.Required(), diff --git a/mcp_servers/github/pkg/github/server.go b/mcp_servers/github/pkg/github/server.go index 9dee1596..7a93cd50 100644 --- a/mcp_servers/github/pkg/github/server.go +++ b/mcp_servers/github/pkg/github/server.go @@ -62,6 +62,7 @@ func NewServer(getClient GetClientFn, version string, readOnly bool, t translati s.AddTool(SearchRepositories(getClient, t)) s.AddTool(GetFileContents(getClient, t)) s.AddTool(ListCommits(getClient, t)) + s.AddTool(ListStargazers(getClient, t)) if !readOnly { s.AddTool(CreateOrUpdateFile(getClient, t)) s.AddTool(CreateRepository(getClient, t)) @@ -85,7 +86,7 @@ func NewServer(getClient GetClientFn, version string, readOnly bool, t translati // GetMe creates a tool to get details of the authenticated user. func GetMe(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) { - return mcp.NewTool("get_me", + return mcp.NewTool("github_get_me", mcp.WithDescription(t("TOOL_GET_ME_DESCRIPTION", "Get details of the authenticated GitHub user. Use this when a request include \"me\", \"my\"...")), mcp.WithString("reason", mcp.Description("Optional: reason the session was created"), @@ -123,7 +124,8 @@ func GetMe(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mc // It returns the value, a boolean indicating if the parameter was present, and an error if the type is wrong. func OptionalParamOK[T any](r mcp.CallToolRequest, p string) (value T, ok bool, err error) { // Check if the parameter is present in the request - val, exists := r.Params.Arguments[p] + args := r.GetArguments() + val, exists := args[p] if !exists { // Not present, return zero value, false, no error return @@ -158,21 +160,22 @@ func requiredParam[T comparable](r mcp.CallToolRequest, p string) (T, error) { var zero T // Check if the parameter is present in the request - if _, ok := r.Params.Arguments[p]; !ok { + args := r.GetArguments() + if _, ok := args[p]; !ok { return zero, fmt.Errorf("missing required parameter: %s", p) } // Check if the parameter is of the expected type - if _, ok := r.Params.Arguments[p].(T); !ok { + if _, ok := args[p].(T); !ok { return zero, fmt.Errorf("parameter %s is not of type %T", p, zero) } - if r.Params.Arguments[p].(T) == zero { + if args[p].(T) == zero { return zero, fmt.Errorf("missing required parameter: %s", p) } - return r.Params.Arguments[p].(T), nil + return args[p].(T), nil } // RequiredInt is a helper function that can be used to fetch a requested parameter from the request. @@ -196,16 +199,17 @@ func OptionalParam[T any](r mcp.CallToolRequest, p string) (T, error) { var zero T // Check if the parameter is present in the request - if _, ok := r.Params.Arguments[p]; !ok { + args := r.GetArguments() + if _, ok := args[p]; !ok { return zero, nil } // Check if the parameter is of the expected type - if _, ok := r.Params.Arguments[p].(T); !ok { - return zero, fmt.Errorf("parameter %s is not of type %T, is %T", p, zero, r.Params.Arguments[p]) + if _, ok := args[p].(T); !ok { + return zero, fmt.Errorf("parameter %s is not of type %T, is %T", p, zero, args[p]) } - return r.Params.Arguments[p].(T), nil + return args[p].(T), nil } // OptionalIntParam is a helper function that can be used to fetch a requested parameter from the request. @@ -239,11 +243,12 @@ func OptionalIntParamWithDefault(r mcp.CallToolRequest, p string, d int) (int, e // 2. If it is present, iterates the elements and checks each is a string func OptionalStringArrayParam(r mcp.CallToolRequest, p string) ([]string, error) { // Check if the parameter is present in the request - if _, ok := r.Params.Arguments[p]; !ok { + args := r.GetArguments() + if _, ok := args[p]; !ok { return []string{}, nil } - switch v := r.Params.Arguments[p].(type) { + switch v := args[p].(type) { case nil: return []string{}, nil case []string: @@ -259,7 +264,7 @@ func OptionalStringArrayParam(r mcp.CallToolRequest, p string) ([]string, error) } return strSlice, nil default: - return []string{}, fmt.Errorf("parameter %s could not be coerced to []string, is %T", p, r.Params.Arguments[p]) + return []string{}, fmt.Errorf("parameter %s could not be coerced to []string, is %T", p, args[p]) } } diff --git a/mcp_servers/github/sse_server.go b/mcp_servers/github/server.go similarity index 55% rename from mcp_servers/github/sse_server.go rename to mcp_servers/github/server.go index c7ac17dc..93f6e17a 100644 --- a/mcp_servers/github/sse_server.go +++ b/mcp_servers/github/server.go @@ -2,6 +2,8 @@ package main import ( "context" + "encoding/base64" + "encoding/json" "fmt" "net/http" "os" @@ -12,39 +14,68 @@ import ( "github.com/github/github-mcp-server/pkg/github" "github.com/github/github-mcp-server/pkg/translations" gogithub "github.com/google/go-github/v69/github" + "github.com/joho/godotenv" "github.com/mark3labs/mcp-go/server" log "github.com/sirupsen/logrus" ) // Define request context key type for safety type contextKey string + const tokenContextKey contextKey = "auth_token" -func runSSEServer() error { +func extractAccessToken(r *http.Request) string { + // First try AUTH_DATA environment variable + authData := os.Getenv("AUTH_DATA") + + if authData == "" { + // Extract from x-auth-data header + headerData := r.Header.Get("x-auth-data") + if headerData != "" { + // Decode base64 + decoded, err := base64.StdEncoding.DecodeString(headerData) + if err != nil { + log.WithError(err).Warn("Failed to decode base64 auth data") + return "" + } + authData = string(decoded) + } + } + + if authData == "" { + return "" + } + + // Try to parse as JSON + var authJSON map[string]interface{} + if err := json.Unmarshal([]byte(authData), &authJSON); err != nil { + log.WithError(err).Warn("Failed to parse auth data JSON") + return "" + } + + // Extract access_token field + if accessToken, ok := authJSON["access_token"].(string); ok { + return accessToken + } + + return "" +} + +func runServer() error { // Create app context ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM) defer stop() t, _ := translations.TranslationHelper() - // Get auth token from environment variable - envAuthToken := os.Getenv("GITHUB_AUTH_TOKEN") - if envAuthToken != "" { - log.Info("Using auth token from environment variable") - } - // Create a context function to extract the token from request headers contextFunc := func(ctx context.Context, r *http.Request) context.Context { - // If env auth token is set, use it directly - if envAuthToken != "" { - return context.WithValue(ctx, tokenContextKey, envAuthToken) - } - - // Otherwise fall back to header token - token := r.Header.Get("x-auth-token") + // Extract from x-auth-data header + token := extractAccessToken(r) if token != "" { return context.WithValue(ctx, tokenContextKey, token) } + return ctx } @@ -75,19 +106,37 @@ func runSSEServer() error { if baseURL == "" { baseURL = fmt.Sprintf("http://localhost:%s", port) } - + // Create a multiplexer to handle multiple handlers + mux := http.NewServeMux() + httpServer := &http.Server{ + Addr: ":" + port, + Handler: mux, + } // Create servers with context function - ghServer := github.NewServer(getClient, "", true, t) - sseServer := server.NewSSEServer(ghServer, + ghServer := github.NewServer(getClient, "", false, t) + sseServer := server.NewSSEServer(ghServer, server.WithBaseURL(baseURL), server.WithSSEContextFunc(contextFunc), + server.WithHTTPServer(httpServer), + ) + streamableHttpServer := server.NewStreamableHTTPServer(ghServer, + server.WithHTTPContextFunc(contextFunc), + server.WithStreamableHTTPServer(httpServer), + server.WithStateLess(true), ) - + + // Register handlers on different paths + mux.Handle("/sse", sseServer) + mux.Handle("/message", sseServer) + mux.Handle("/mcp/", streamableHttpServer) + // Start the server with a goroutine serverErr := make(chan error, 1) go func() { - log.Printf("SSE server listening on :%s", port) - serverErr <- sseServer.Start(":" + port) + log.Printf("Server listening on :%s", port) + if err := httpServer.ListenAndServe(); err != nil && err != http.ErrServerClosed { + serverErr <- err + } }() // Wait for termination signal or server error @@ -99,8 +148,8 @@ func runSSEServer() error { // timeout context for shutdown shutdownCtx, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() - sseServer.Shutdown(shutdownCtx) - + httpServer.Shutdown(shutdownCtx) + log.Info("Server gracefully stopped") } @@ -108,7 +157,8 @@ func runSSEServer() error { } func main() { - if err := runSSEServer(); err != nil { + _ = godotenv.Load(".env") + if err := runServer(); err != nil { log.Fatalf("Server error: %v", err) } } diff --git a/mcp_servers/gmail/Dockerfile b/mcp_servers/gmail/Dockerfile new file mode 100644 index 00000000..809e0c4f --- /dev/null +++ b/mcp_servers/gmail/Dockerfile @@ -0,0 +1,35 @@ +# Use a Node.js image as the base for building the application +FROM node:22-alpine AS builder + +# Set the working directory inside the container +WORKDIR /app + +# Copy package.json and package-lock.json to install dependencies +COPY mcp_servers/gmail/package.json mcp_servers/gmail/package-lock.json ./ + +# Install dependencies (ignoring scripts to prevent running the prepare script) +RUN npm install --ignore-scripts + +# Copy the rest of the application source code +COPY mcp_servers/gmail . + +# Build the application using TypeScript +RUN npm run build + +# Use a smaller Node.js image for the final image +FROM node:22-slim AS release + +# Set the working directory inside the container +WORKDIR /app + +# Copy the built application from the builder stage +COPY --from=builder /app/build /app/build +COPY --from=builder /app/package.json /app/package.json +COPY --from=builder /app/package-lock.json /app/package-lock.json +# Install only production dependencies +RUN npm ci --omit=dev --ignore-scripts + +EXPOSE 5000 + +# Specify the command to run the application +ENTRYPOINT ["node", "build/src/index.js"] \ No newline at end of file diff --git a/mcp_servers/gmail/README.md b/mcp_servers/gmail/README.md new file mode 100644 index 00000000..b65f794a --- /dev/null +++ b/mcp_servers/gmail/README.md @@ -0,0 +1,95 @@ +# Gmail MCP Server + +A Model Context Protocol (MCP) server for Gmail integration. Read, send, and manage emails through Gmail's API with full OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Gmail with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("GMAIL", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/gmail-mcp-server:latest + + +# Run Gmail MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/gmail-mcp-server:latest + +# Run Gmail MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_gmail_access_token_here"}' \ + ghcr.io/klavis-ai/gmail-mcp-server:latest +``` + +**OAuth Setup:** Gmail requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +### Email Operations +- **Email Reading**: Fetch emails, search messages, get message details +- **Email Sending**: Send new emails with attachments and rich formatting +- **Email Management**: Mark as read/unread, archive, delete emails +- **Label Management**: Apply, remove, and manage Gmail labels +- **Thread Management**: Handle email conversations and threads +- **Attachments**: Download and extract content from email attachments (PDF, Word, Excel, images, etc.) + +### Contact Search +- **Search Contacts**: Search for contacts by name or email address using `gmail_search_contacts` + - Supports four contact types: + - `all` (default): Searches all sources in parallel, returns three independent result sets (personal, other, directory) each with its own pagination token + - `personal`: Searches your saved contacts + - `other`: Searches other contact sources (Gmail suggestions, etc.) + - `directory`: Searches domain directory and domain contacts (requires directory.readonly scope) + - Directory source options (only used with `directory` or `all` type): + - `UNSPECIFIED`: Searches both DOMAIN_PROFILE and DOMAIN_CONTACT (default) + - `DOMAIN_DIRECTORY`: Searches domain profiles only + - `DOMAIN_CONTACTS`: Searches domain contacts only + - Flexible query matching against names, email addresses, and phone numbers + - Returns contact details including display name, email addresses, phone numbers, and organizations + - **Pagination**: When `contactType` is `all`, returns separate pagination tokens for each source type, allowing flexible independent pagination + - Paginated results with configurable page size (max 30 for personal/other, max 500 for directory) + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/gmail/package-lock.json b/mcp_servers/gmail/package-lock.json new file mode 100644 index 00000000..182857f7 --- /dev/null +++ b/mcp_servers/gmail/package-lock.json @@ -0,0 +1,2465 @@ +{ + "name": "@klavis-ai/gmail-mcp-server", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@klavis-ai/gmail-mcp-server", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.12.1", + "exceljs": "^4.4.0", + "express": "^5.1.0", + "googleapis": "^129.0.0", + "mammoth": "^1.11.0", + "pdf-parse": "^1.1.1", + "zod": "^3.22.4", + "zod-to-json-schema": "^3.22.1" + }, + "bin": { + "gmail-mcp": "build/index.js" + }, + "devDependencies": { + "@types/express": "^5.0.0", + "@types/node": "^20.10.5", + "@types/pdf-parse": "^1.1.5", + "typescript": "^5.3.3" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@fast-csv/format": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/@fast-csv/format/-/format-4.3.5.tgz", + "integrity": "sha512-8iRn6QF3I8Ak78lNAa+Gdl5MJJBM5vRHivFtMRUWINdevNo00K7OXxS2PshawLKTejVwieIlPmK5YlLu6w4u8A==", + "license": "MIT", + "dependencies": { + "@types/node": "^14.0.1", + "lodash.escaperegexp": "^4.1.2", + "lodash.isboolean": "^3.0.3", + "lodash.isequal": "^4.5.0", + "lodash.isfunction": "^3.0.9", + "lodash.isnil": "^4.0.0" + } + }, + "node_modules/@fast-csv/format/node_modules/@types/node": { + "version": "14.18.63", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.63.tgz", + "integrity": "sha512-fAtCfv4jJg+ExtXhvCkCqUKZ+4ok/JQk01qDKhL5BDDoS3AxKXhV5/MAVUZyQnSEd2GT92fkgZl0pz0Q0AzcIQ==", + "license": "MIT" + }, + "node_modules/@fast-csv/parse": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/@fast-csv/parse/-/parse-4.3.6.tgz", + "integrity": "sha512-uRsLYksqpbDmWaSmzvJcuApSEe38+6NQZBUsuAyMZKqHxH0g1wcJgsKUvN3WC8tewaqFjBMMGrkHmC+T7k8LvA==", + "license": "MIT", + "dependencies": { + "@types/node": "^14.0.1", + "lodash.escaperegexp": "^4.1.2", + "lodash.groupby": "^4.6.0", + "lodash.isfunction": "^3.0.9", + "lodash.isnil": "^4.0.0", + "lodash.isundefined": "^3.0.1", + "lodash.uniq": "^4.5.0" + } + }, + "node_modules/@fast-csv/parse/node_modules/@types/node": { + "version": "14.18.63", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.63.tgz", + "integrity": "sha512-fAtCfv4jJg+ExtXhvCkCqUKZ+4ok/JQk01qDKhL5BDDoS3AxKXhV5/MAVUZyQnSEd2GT92fkgZl0pz0Q0AzcIQ==", + "license": "MIT" + }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.13.1.tgz", + "integrity": "sha512-8q6+9aF0yA39/qWT/uaIj6zTpC+Qu07DnN/lb9mjoquCJsAh6l3HyYqc9O3t2j7GilseOQOQimLg7W3By6jqvg==", + "license": "MIT", + "dependencies": { + "ajv": "^6.12.6", + "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "express": "^5.0.1", + "express-rate-limit": "^7.5.0", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.23.8", + "zod-to-json-schema": "^3.24.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@types/body-parser": { + "version": "1.19.5", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", + "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/express": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.1.tgz", + "integrity": "sha512-UZUw8vjpWFXuDnjFTh7/5c2TWDlQqeXHi6hcN7F2XSVT5P+WmUnnbFS3KA6Jnc6IsEqI2qCVu2bK0R0J4A8ZQQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^5.0.0", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.6.tgz", + "integrity": "sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", + "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "20.17.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.10.tgz", + "integrity": "sha512-/jrvh5h6NXhEauFFexRin69nA0uHJ5gwk4iDivp/DeoEua3uwCUto6PC86IpRITBOs4+6i2I56K5x5b6WYGXHA==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/@types/pdf-parse": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@types/pdf-parse/-/pdf-parse-1.1.5.tgz", + "integrity": "sha512-kBfrSXsloMnUJOKi25s3+hRmkycHfLK6A09eRGqF/N8BkQoPUmaCr+q8Cli5FnfohEz/rsv82zAiPz/LXtOGhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/qs": { + "version": "6.9.18", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.18.tgz", + "integrity": "sha512-kK7dgTYDyGqS+e2Q4aK9X3D7q234CIZ1Bv0q/7Z5IwRDoADNU81xXJK/YVyLbLTZCoIwUoDoffFeF+p/eIklAA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/send": { + "version": "0.17.4", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", + "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.7", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", + "integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/@xmldom/xmldom": { + "version": "0.8.10", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz", + "integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/agent-base": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/archiver": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/archiver/-/archiver-5.3.2.tgz", + "integrity": "sha512-+25nxyyznAXF7Nef3y0EbBeqmGZgeN/BxHX29Rs39djAfaFalmQ89SE6CWyDCHzGL0yt/ycBtNOmGTW0FyGWNw==", + "license": "MIT", + "dependencies": { + "archiver-utils": "^2.1.0", + "async": "^3.2.4", + "buffer-crc32": "^0.2.1", + "readable-stream": "^3.6.0", + "readdir-glob": "^1.1.2", + "tar-stream": "^2.2.0", + "zip-stream": "^4.1.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/archiver-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-2.1.0.tgz", + "integrity": "sha512-bEL/yUb/fNNiNTuUz979Z0Yg5L+LzLxGJz8x79lYmR54fmTIb6ob/hNQgkQnIUDWIFjZVQwl9Xs356I6BAMHfw==", + "license": "MIT", + "dependencies": { + "glob": "^7.1.4", + "graceful-fs": "^4.2.0", + "lazystream": "^1.0.0", + "lodash.defaults": "^4.2.0", + "lodash.difference": "^4.5.0", + "lodash.flatten": "^4.4.0", + "lodash.isplainobject": "^4.0.6", + "lodash.union": "^4.6.0", + "normalize-path": "^3.0.0", + "readable-stream": "^2.0.0" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/archiver/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", + "license": "MIT" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/big-integer": { + "version": "1.6.52", + "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.52.tgz", + "integrity": "sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==", + "license": "Unlicense", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/bignumber.js": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.2.tgz", + "integrity": "sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/binary": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", + "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", + "license": "MIT", + "dependencies": { + "buffers": "~0.1.1", + "chainsaw": "~0.1.0" + }, + "engines": { + "node": "*" + } + }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "license": "MIT", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/bl/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/bluebird": { + "version": "3.4.7", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.4.7.tgz", + "integrity": "sha512-iD3898SR7sWVRHbiQv+sHUtHnMvC1o3nW5rAcqnq3uOn07DSAppZYUkIGslDz6gXC7HfunPe7YVBgoEJASPcHA==", + "license": "MIT" + }, + "node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", + "license": "BSD-3-Clause" + }, + "node_modules/buffer-indexof-polyfill": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/buffer-indexof-polyfill/-/buffer-indexof-polyfill-1.0.2.tgz", + "integrity": "sha512-I7wzHwA3t1/lwXQh+A5PbNvJxgfo5r3xulgpYDB5zckTu/Z9oUK9biouBKQUjEqzaz3HnAT6TYoovmE+GqSf7A==", + "license": "MIT", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/buffers": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", + "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==", + "engines": { + "node": ">=0.2.0" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz", + "integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz", + "integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/chainsaw": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", + "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", + "license": "MIT/X11", + "dependencies": { + "traverse": ">=0.3.0 <0.4" + }, + "engines": { + "node": "*" + } + }, + "node_modules/compress-commons": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-4.1.2.tgz", + "integrity": "sha512-D3uMHtGc/fcO1Gt1/L7i1e33VOvD4A9hfQLP+6ewd+BvG/gQ84Yh4oftEhAdjSMgBgwGL+jsppT7JYNpo6MHHg==", + "license": "MIT", + "dependencies": { + "buffer-crc32": "^0.2.13", + "crc32-stream": "^4.0.2", + "normalize-path": "^3.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/compress-commons/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" + }, + "node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "license": "MIT" + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/crc-32": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", + "license": "Apache-2.0", + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/crc32-stream": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-4.0.3.tgz", + "integrity": "sha512-NT7w2JVU7DFroFdYkeq8cywxrgjPHWkdX1wjpRQXPX5Asews3tA+Ght6lddQO5Mkumffp3X7GEqku3epj2toIw==", + "license": "MIT", + "dependencies": { + "crc-32": "^1.2.0", + "readable-stream": "^3.4.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/crc32-stream/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/dayjs": { + "version": "1.11.13", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.13.tgz", + "integrity": "sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==", + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/dingbat-to-unicode": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dingbat-to-unicode/-/dingbat-to-unicode-1.0.1.tgz", + "integrity": "sha512-98l0sW87ZT58pU4i61wa2OHwxbiYSbuxsCBozaVnYX2iCnr3bLM3fIes1/ej7h1YdOKuKt/MLs706TVnALA65w==", + "license": "BSD-2-Clause" + }, + "node_modules/duck": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/duck/-/duck-0.1.12.tgz", + "integrity": "sha512-wkctla1O6VfP89gQ+J/yDesM0S7B7XLXjKGzXxMDVFg7uEn706niAtyYovKbyq1oT9YwDcly721/iUWoc8MVRg==", + "license": "BSD", + "dependencies": { + "underscore": "^1.13.1" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/duplexer2": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz", + "integrity": "sha512-asLFVfWWtJ90ZyOUHMqk7/S2w2guQKxUI2itj3d92ADHhxUSbCMGi1f1cBcJ7xM1To+pE/Khbwo1yuNbMEPKeA==", + "license": "BSD-3-Clause", + "dependencies": { + "readable-stream": "^2.0.2" + } + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "license": "Apache-2.0", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventsource": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", + "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", + "license": "MIT", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.2.tgz", + "integrity": "sha512-6RxOBZ/cYgd8usLwsEl+EC09Au/9BcmCKYF2/xbml6DNczf7nv0MQb+7BA2F+li6//I+28VNlQR37XfQtcAJuA==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/exceljs": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/exceljs/-/exceljs-4.4.0.tgz", + "integrity": "sha512-XctvKaEMaj1Ii9oDOqbW/6e1gXknSY4g/aLCDicOXqBE4M0nRWkUu0PTp++UPNzoFY12BNHMfs/VadKIS6llvg==", + "license": "MIT", + "dependencies": { + "archiver": "^5.0.0", + "dayjs": "^1.8.34", + "fast-csv": "^4.3.1", + "jszip": "^3.10.1", + "readable-stream": "^3.6.0", + "saxes": "^5.0.1", + "tmp": "^0.2.0", + "unzipper": "^0.10.11", + "uuid": "^8.3.0" + }, + "engines": { + "node": ">=8.3.0" + } + }, + "node_modules/exceljs/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/exceljs/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/express": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "license": "MIT", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.0", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.0.tgz", + "integrity": "sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg==", + "license": "MIT", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": "^4.11 || 5 || ^5.0.0-beta.1" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "license": "MIT" + }, + "node_modules/fast-csv": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/fast-csv/-/fast-csv-4.3.6.tgz", + "integrity": "sha512-2RNSpuwwsJGP0frGsOmTb9oUF+VkFSM4SyLTDgwf2ciHWTarN0lQTC+F2f/t5J9QjW+c65VFIAAu85GsvMIusw==", + "license": "MIT", + "dependencies": { + "@fast-csv/format": "4.3.5", + "@fast-csv/parse": "4.3.6" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, + "node_modules/finalhandler": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "license": "MIT" + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "license": "ISC" + }, + "node_modules/fstream": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz", + "integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "dependencies": { + "graceful-fs": "^4.1.2", + "inherits": "~2.0.0", + "mkdirp": ">=0.5 0", + "rimraf": "2" + }, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gaxios": { + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz", + "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==", + "license": "Apache-2.0", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/gcp-metadata": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz", + "integrity": "sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==", + "license": "Apache-2.0", + "dependencies": { + "gaxios": "^6.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.6.tgz", + "integrity": "sha512-qxsEs+9A+u85HhllWJJFicJfPDhRmjzoYdl64aMWW9yRIJmSyxdn8IEkuIM530/7T+lv0TIHd8L6Q/ra0tEoeA==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "dunder-proto": "^1.0.0", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "function-bind": "^1.1.2", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/google-auth-library": { + "version": "9.15.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.0.tgz", + "integrity": "sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ==", + "license": "Apache-2.0", + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^6.1.1", + "gcp-metadata": "^6.1.0", + "gtoken": "^7.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/googleapis": { + "version": "129.0.0", + "resolved": "https://registry.npmjs.org/googleapis/-/googleapis-129.0.0.tgz", + "integrity": "sha512-gFatrzby+oh/GxEeMhJOKzgs9eG7yksRcTon9b+kPie4ZnDSgGQ85JgtUaBtLSBkcKpUKukdSP6Km1aCjs4y4Q==", + "license": "Apache-2.0", + "dependencies": { + "google-auth-library": "^9.0.0", + "googleapis-common": "^7.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/googleapis-common": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/googleapis-common/-/googleapis-common-7.2.0.tgz", + "integrity": "sha512-/fhDZEJZvOV3X5jmD+fKxMqma5q2Q9nZNSF3kn1F18tpxmA86BcTxAGBQdM0N89Z3bEaIs+HVznSmFJEAmMTjA==", + "license": "Apache-2.0", + "dependencies": { + "extend": "^3.0.2", + "gaxios": "^6.0.3", + "google-auth-library": "^9.7.0", + "qs": "^6.7.0", + "url-template": "^2.0.8", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "license": "ISC" + }, + "node_modules/gtoken": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", + "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", + "license": "MIT", + "dependencies": { + "gaxios": "^6.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/immediate": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", + "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", + "license": "MIT" + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "license": "MIT" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "license": "MIT", + "dependencies": { + "bignumber.js": "^9.0.0" + } + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, + "node_modules/jszip": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", + "integrity": "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==", + "license": "(MIT OR GPL-3.0-or-later)", + "dependencies": { + "lie": "~3.3.0", + "pako": "~1.0.2", + "readable-stream": "~2.3.6", + "setimmediate": "^1.0.5" + } + }, + "node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "license": "MIT", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "license": "MIT", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/lazystream": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", + "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", + "license": "MIT", + "dependencies": { + "readable-stream": "^2.0.5" + }, + "engines": { + "node": ">= 0.6.3" + } + }, + "node_modules/lie": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", + "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==", + "license": "MIT", + "dependencies": { + "immediate": "~3.0.5" + } + }, + "node_modules/listenercount": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/listenercount/-/listenercount-1.0.1.tgz", + "integrity": "sha512-3mk/Zag0+IJxeDrxSgaDPy4zZ3w05PRZeJNnlWhzFz5OkX49J4krc+A8X2d2M69vGMBEX0uyl8M+W+8gH+kBqQ==", + "license": "ISC" + }, + "node_modules/lodash.defaults": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", + "integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==", + "license": "MIT" + }, + "node_modules/lodash.difference": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.difference/-/lodash.difference-4.5.0.tgz", + "integrity": "sha512-dS2j+W26TQ7taQBGN8Lbbq04ssV3emRw4NY58WErlTO29pIqS0HmoT5aJ9+TUQ1N3G+JOZSji4eugsWwGp9yPA==", + "license": "MIT" + }, + "node_modules/lodash.escaperegexp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.escaperegexp/-/lodash.escaperegexp-4.1.2.tgz", + "integrity": "sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw==", + "license": "MIT" + }, + "node_modules/lodash.flatten": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", + "integrity": "sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==", + "license": "MIT" + }, + "node_modules/lodash.groupby": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/lodash.groupby/-/lodash.groupby-4.6.0.tgz", + "integrity": "sha512-5dcWxm23+VAoz+awKmBaiBvzox8+RqMgFhi7UvX9DHZr2HdxHXM/Wrf8cfKpsW37RNrvtPn6hSwNqurSILbmJw==", + "license": "MIT" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==", + "license": "MIT" + }, + "node_modules/lodash.isequal": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", + "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", + "deprecated": "This package is deprecated. Use require('node:util').isDeepStrictEqual instead.", + "license": "MIT" + }, + "node_modules/lodash.isfunction": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/lodash.isfunction/-/lodash.isfunction-3.0.9.tgz", + "integrity": "sha512-AirXNj15uRIMMPihnkInB4i3NHeb4iBtNg9WRWuK2o31S+ePwwNmDPaTL3o7dTJ+VXNZim7rFs4rxN4YU1oUJw==", + "license": "MIT" + }, + "node_modules/lodash.isnil": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/lodash.isnil/-/lodash.isnil-4.0.0.tgz", + "integrity": "sha512-up2Mzq3545mwVnMhTDMdfoG1OurpA/s5t88JmQX809eH3C8491iu2sfKhTfhQtKY78oPNhiaHJUpT/dUDAAtng==", + "license": "MIT" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==", + "license": "MIT" + }, + "node_modules/lodash.isundefined": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/lodash.isundefined/-/lodash.isundefined-3.0.1.tgz", + "integrity": "sha512-MXB1is3s899/cD8jheYYE2V9qTHwKvt+npCwpD+1Sxm3Q3cECXCiYHjeHWXNwr6Q0SOBPrYUDxendrO6goVTEA==", + "license": "MIT" + }, + "node_modules/lodash.union": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/lodash.union/-/lodash.union-4.6.0.tgz", + "integrity": "sha512-c4pB2CdGrGdjMKYLA+XiRDO7Y0PRQbm/Gzg8qMj+QH+pFVAoTp5sBpO0odL3FjoPCGjK96p6qsP+yQoiLoOBcw==", + "license": "MIT" + }, + "node_modules/lodash.uniq": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", + "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==", + "license": "MIT" + }, + "node_modules/lop": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/lop/-/lop-0.4.2.tgz", + "integrity": "sha512-RefILVDQ4DKoRZsJ4Pj22TxE3omDO47yFpkIBoDKzkqPRISs5U1cnAdg/5583YPkWPaLIYHOKRMQSvjFsO26cw==", + "license": "BSD-2-Clause", + "dependencies": { + "duck": "^0.1.12", + "option": "~0.2.1", + "underscore": "^1.13.1" + } + }, + "node_modules/mammoth": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/mammoth/-/mammoth-1.11.0.tgz", + "integrity": "sha512-BcEqqY/BOwIcI1iR5tqyVlqc3KIaMRa4egSoK83YAVrBf6+yqdAAbtUcFDCWX8Zef8/fgNZ6rl4VUv+vVX8ddQ==", + "license": "BSD-2-Clause", + "dependencies": { + "@xmldom/xmldom": "^0.8.6", + "argparse": "~1.0.3", + "base64-js": "^1.5.1", + "bluebird": "~3.4.0", + "dingbat-to-unicode": "^1.0.1", + "jszip": "^3.7.1", + "lop": "^0.4.2", + "path-is-absolute": "^1.0.0", + "underscore": "^1.13.1", + "xmlbuilder": "^10.0.0" + }, + "bin": { + "mammoth": "bin/mammoth" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "license": "MIT", + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-ensure": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/node-ensure/-/node-ensure-0.0.0.tgz", + "integrity": "sha512-DRI60hzo2oKN1ma0ckc6nQWlHU69RH6xN0sjQTjMpChPfTYvKZdcQFfdYK2RWbJcKyUizSIy/l8OTGxMAM1QDw==", + "license": "MIT" + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz", + "integrity": "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/option": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/option/-/option-0.2.4.tgz", + "integrity": "sha512-pkEqbDyl8ou5cpq+VsnQbe/WlEy5qS7xPzMS1U55OCG9KPvwFD46zDbxQIj3egJSFc3D+XhYOPUzz49zQAVy7A==", + "license": "BSD-2-Clause" + }, + "node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "license": "(MIT AND Zlib)" + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/pdf-parse": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pdf-parse/-/pdf-parse-1.1.1.tgz", + "integrity": "sha512-v6ZJ/efsBpGrGGknjtq9J/oC8tZWq0KWL5vQrk2GlzLEQPUDB1ex+13Rmidl1neNN358Jn9EHZw5y07FFtaC7A==", + "license": "MIT", + "dependencies": { + "debug": "^3.1.0", + "node-ensure": "^0.0.0" + }, + "engines": { + "node": ">=6.8.1" + } + }, + "node_modules/pdf-parse/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/pkce-challenge": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", + "license": "MIT", + "engines": { + "node": ">=16.20.0" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "license": "MIT" + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", + "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.6.3", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "license": "MIT" + }, + "node_modules/readdir-glob": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", + "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", + "license": "Apache-2.0", + "dependencies": { + "minimatch": "^5.1.0" + } + }, + "node_modules/readdir-glob/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/readdir-glob/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + "path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/router/node_modules/path-to-regexp": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "license": "MIT", + "engines": { + "node": ">=16" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/saxes": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz", + "integrity": "sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw==", + "license": "ISC", + "dependencies": { + "xmlchars": "^2.2.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/serve-static": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", + "license": "MIT" + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "license": "BSD-3-Clause" + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "license": "MIT" + }, + "node_modules/tar-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "license": "MIT", + "dependencies": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tar-stream/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/tmp": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", + "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", + "license": "MIT", + "engines": { + "node": ">=14.14" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" + }, + "node_modules/traverse": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", + "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==", + "license": "MIT/X11", + "engines": { + "node": "*" + } + }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.2.tgz", + "integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/underscore": { + "version": "1.13.7", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.7.tgz", + "integrity": "sha512-GMXzWtsc57XAtguZgaQViUOzs0KTkk8ojr3/xAxXLITqf/3EMwxC0inyETfDFjH/Krbhuep0HNbbjI9i/q3F3g==", + "license": "MIT" + }, + "node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true, + "license": "MIT" + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/unzipper": { + "version": "0.10.14", + "resolved": "https://registry.npmjs.org/unzipper/-/unzipper-0.10.14.tgz", + "integrity": "sha512-ti4wZj+0bQTiX2KmKWuwj7lhV+2n//uXEotUmGuQqrbVZSEGFMbI68+c6JCQ8aAmUWYvtHEz2A8K6wXvueR/6g==", + "license": "MIT", + "dependencies": { + "big-integer": "^1.6.17", + "binary": "~0.3.0", + "bluebird": "~3.4.1", + "buffer-indexof-polyfill": "~1.0.0", + "duplexer2": "~0.1.4", + "fstream": "^1.0.12", + "graceful-fs": "^4.2.2", + "listenercount": "~1.0.1", + "readable-stream": "~2.3.6", + "setimmediate": "~1.0.4" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/url-template": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/url-template/-/url-template-2.0.8.tgz", + "integrity": "sha512-XdVKMF4SJ0nP/O7XIPB0JwAEuT9lDIYnNsK8yGVe43y0AWoKeJNdv3ZNWh7ksJ6KqQFjOO6ox/VEitLnaVNufw==", + "license": "BSD" + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, + "node_modules/xmlbuilder": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-10.1.1.tgz", + "integrity": "sha512-OyzrcFLL/nb6fMGHbiRDuPup9ljBycsdCypwuyg5AAHvyWzGfChJpCXMG88AGTIMFhGZ9RccFN1e6lhg3hkwKg==", + "license": "MIT", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/xmlchars": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", + "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", + "license": "MIT" + }, + "node_modules/zip-stream": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-4.1.1.tgz", + "integrity": "sha512-9qv4rlDiopXg4E69k+vMHjNN63YFMe9sZMrdlvKnCjlCRWeCBswPPMPUfx+ipsAWq1LXHe70RcbaHdJJpS6hyQ==", + "license": "MIT", + "dependencies": { + "archiver-utils": "^3.0.4", + "compress-commons": "^4.1.2", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/zip-stream/node_modules/archiver-utils": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-3.0.4.tgz", + "integrity": "sha512-KVgf4XQVrTjhyWmx6cte4RxonPLR9onExufI1jhvw/MQ4BB6IsZD5gT8Lq+u/+pRkWna/6JoHpiQioaqFP5Rzw==", + "license": "MIT", + "dependencies": { + "glob": "^7.2.3", + "graceful-fs": "^4.2.0", + "lazystream": "^1.0.0", + "lodash.defaults": "^4.2.0", + "lodash.difference": "^4.5.0", + "lodash.flatten": "^4.4.0", + "lodash.isplainobject": "^4.0.6", + "lodash.union": "^4.6.0", + "normalize-path": "^3.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/zip-stream/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/zod": { + "version": "3.24.1", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.1.tgz", + "integrity": "sha512-muH7gBL9sI1nciMZV67X5fTKKBLtwpZ5VBp1vsOQzj1MhrBZ4wlVCm3gedKZWLp0Oyel8sIGfeiz54Su+OVT+A==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.24.1", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.1.tgz", + "integrity": "sha512-3h08nf3Vw3Wl3PK+q3ow/lIil81IT2Oa7YpQyUUDsEWbXveMesdfK1xBd2RhCkynwZndAxixji/7SYJJowr62w==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.24.1" + } + } + } +} diff --git a/mcp_servers/gmail/package.json b/mcp_servers/gmail/package.json new file mode 100644 index 00000000..a342599a --- /dev/null +++ b/mcp_servers/gmail/package.json @@ -0,0 +1,41 @@ +{ + "name": "@klavis-ai/gmail-mcp-server", + "version": "1.0.0", + "description": "Gmail MCP server", + "type": "module", + "main": "build/index.js", + "bin": { + "gmail-mcp": "./build/index.js" + }, + "scripts": { + "build": "tsc", + "start": "node build/src/index.js", + "prepare": "npm run build", + "prepublishOnly": "npm run build" + }, + "files": [ + "dist", + "README.md" + ], + "author": "Klavis AI", + "license": "MIT", + "engines": { + "node": ">=20.0.0" + }, + "dependencies": { + "@modelcontextprotocol/sdk": "^1.12.1", + "googleapis": "^129.0.0", + "zod": "^3.22.4", + "zod-to-json-schema": "^3.22.1", + "express": "^5.1.0", + "pdf-parse": "^1.1.1", + "mammoth": "^1.11.0", + "exceljs": "^4.4.0" + }, + "devDependencies": { + "@types/node": "^20.10.5", + "@types/express": "^5.0.0", + "@types/pdf-parse": "^1.1.5", + "typescript": "^5.3.3" + } +} diff --git a/mcp_servers/gmail/src/index.ts b/mcp_servers/gmail/src/index.ts new file mode 100644 index 00000000..657e4746 --- /dev/null +++ b/mcp_servers/gmail/src/index.ts @@ -0,0 +1,1311 @@ +#!/usr/bin/env node + +import express, { Request, Response } from 'express'; +import { Server } from "@modelcontextprotocol/sdk/server/index.js"; +import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js"; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; +import { + CallToolRequestSchema, + ListToolsRequestSchema, +} from "@modelcontextprotocol/sdk/types.js"; +import { google } from 'googleapis'; +import { z } from "zod"; +import { zodToJsonSchema } from "zod-to-json-schema"; +import { createEmailMessage, extractPdfText, extractDocxText, extractXlsxText } from "./utl.js"; +import { AsyncLocalStorage } from 'async_hooks'; + +// Create AsyncLocalStorage for request context +const asyncLocalStorage = new AsyncLocalStorage<{ + gmailClient: any; + peopleClient: any; +}>(); + +// Type definitions for Gmail API responses +interface GmailMessagePart { + partId?: string; + mimeType?: string; + filename?: string; + headers?: Array<{ + name: string; + value: string; + }>; + body?: { + attachmentId?: string; + size?: number; + data?: string; + }; + parts?: GmailMessagePart[]; +} + +interface EmailAttachment { + id: string; + filename: string; + mimeType: string; + size: number; +} + +interface EmailContent { + text: string; + html: string; +} + +// Convert base64url (Gmail) -> standard base64 +function base64UrlToBase64(input: string): string { + let output = input.replace(/-/g, '+').replace(/_/g, '/'); + const padLen = output.length % 4; + if (padLen === 2) output += '=='; + else if (padLen === 3) output += '='; + else if (padLen === 1) output += '==='; // extremely rare, but safe guard + return output; +} + +// Helper function to get Gmail client from context +function getGmailClient() { + return asyncLocalStorage.getStore()!.gmailClient; +} + +// Helper function to get People client from context +function getPeopleClient() { + return asyncLocalStorage.getStore()!.peopleClient; +} + +/** + * Send warmup request with empty query to update the cache. + * + * According to Google's documentation, searchContacts and otherContacts.search + * require a warmup request before actual searches for better performance. + * See: https://developers.google.com/people/v1/contacts#search_the_users_contacts + * and https://developers.google.com/people/v1/other-contacts#search_the_users_other_contacts + */ +async function warmupContactSearch(peopleClient: any, contactType: 'personal' | 'other'): Promise { + try { + if (contactType === 'personal') { + // Warmup for people.searchContacts + await peopleClient.people.searchContacts({ + query: '', + pageSize: 1, + readMask: 'names', + }); + console.log('Warmup request sent for personal contacts'); + } else if (contactType === 'other') { + // Warmup for otherContacts.search + await peopleClient.otherContacts.search({ + query: '', + pageSize: 1, + readMask: 'names', + }); + console.log('Warmup request sent for other contacts'); + } + } catch (error) { + // Don't fail if warmup fails, just log it + console.warn(`Warmup request failed for ${contactType} contacts:`, error); + } +} + +function extractAccessToken(req: Request): string { + let authData = process.env.AUTH_DATA; + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: Gmail access token is missing. Provide it via AUTH_DATA env var or x-auth-data header with access_token field.'); + return ''; + } + + const authDataJson = JSON.parse(authData); + return authDataJson.access_token ?? ''; +} + +/** + * Recursively extract email body content from MIME message parts + * Handles complex email structures with nested parts + */ +function extractEmailContent(messagePart: GmailMessagePart): EmailContent { + // Initialize containers for different content types + let textContent = ''; + let htmlContent = ''; + + // If the part has a body with data, process it based on MIME type + if (messagePart.body && messagePart.body.data) { + const content = Buffer.from(messagePart.body.data, 'base64').toString('utf8'); + + // Store content based on its MIME type + if (messagePart.mimeType === 'text/plain') { + textContent = content; + } else if (messagePart.mimeType === 'text/html') { + htmlContent = content; + } + } + + // If the part has nested parts, recursively process them + if (messagePart.parts && messagePart.parts.length > 0) { + for (const part of messagePart.parts) { + const { text, html } = extractEmailContent(part); + if (text) textContent += text; + if (html) htmlContent += html; + } + } + + // Return both plain text and HTML content + return { text: textContent, html: htmlContent }; +} + +// Schema definitions +const SendEmailSchema = z.object({ + to: z.array(z.string()).describe("List of recipient email addresses. You can use gmail_search_contacts tool to find contact emails. You MUST NOT assume the emails unless they are explicitly provided."), + subject: z.string().describe("Email subject"), + body: z.string().describe("Email body content (used for text/plain or when htmlBody not provided)"), + htmlBody: z.string().optional().describe("HTML version of the email body"), + mimeType: z.enum(['text/plain', 'text/html', 'multipart/alternative']).optional().default('text/plain').describe("Email content type"), + cc: z.array(z.string()).optional().describe("List of CC recipients. You can use gmail_search_contacts tool to find contact emails. You MUST NOT assume the emails unless they are explicitly provided."), + bcc: z.array(z.string()).optional().describe("List of BCC recipients. You can use gmail_search_contacts tool to find contact emails. You MUST NOT assume the emails unless they are explicitly provided."), + threadId: z.string().optional().describe("Thread ID to reply to"), + inReplyTo: z.string().optional().describe("Message ID being replied to"), +}); + +const ReadEmailSchema = z.object({ + messageId: z.string().describe("ID of the email message to retrieve"), +}); + +const SearchEmailsSchema = z.object({ + query: z.string().describe("Gmail search query (e.g., 'from:example@gmail.com')"), + maxResults: z.number().optional().describe("Maximum number of results to return"), +}); + +// Updated schema to include removeLabelIds +const ModifyEmailSchema = z.object({ + messageId: z.string().describe("ID of the email message to modify"), + addLabelIds: z.array(z.string()).optional().describe("List of label IDs to add to the message"), + removeLabelIds: z.array(z.string()).optional().describe("List of label IDs to remove from the message"), +}); + +const DeleteEmailSchema = z.object({ + messageId: z.string().describe("ID of the email message to delete"), +}); + +// Schema for searching contacts +const SearchContactsSchema = z.object({ + query: z.string().describe("The plain-text search query for contact names, email addresses, phone numbers, etc."), + contactType: z.enum(['all', 'personal', 'other', 'directory']).optional().default('all').describe("Type of contacts to search: 'all' (search all types - returns three separate result sets with independent pagination tokens), 'personal' (your saved contacts), 'other' (other contact sources like Gmail suggestions), or 'directory' (domain directory)"), + pageSize: z.number().optional().default(10).describe("Number of results to return. For personal/other: max 30, for directory: max 500"), + pageToken: z.string().optional().describe("Page token for pagination (used with directory searches)"), + directorySources: z.enum(['UNSPECIFIED', 'DOMAIN_DIRECTORY', 'DOMAIN_CONTACTS']).optional().default('UNSPECIFIED').describe("Directory sources to search (only used for directory type)") +}); + +// Schema for getting attachments of an email +const GetEmailAttachmentsSchema = z.object({ + messageId: z.string().describe("ID of the email message to retrieve attachments for"), +}); + +// Schemas for batch operations +const BatchModifyEmailsSchema = z.object({ + messageIds: z.array(z.string()).describe("List of message IDs to modify"), + addLabelIds: z.array(z.string()).optional().describe("List of label IDs to add to all messages"), + removeLabelIds: z.array(z.string()).optional().describe("List of label IDs to remove from all messages"), + batchSize: z.number().optional().default(50).describe("Number of messages to process in each batch (default: 50)"), +}); + +const BatchDeleteEmailsSchema = z.object({ + messageIds: z.array(z.string()).describe("List of message IDs to delete"), + batchSize: z.number().optional().default(50).describe("Number of messages to process in each batch (default: 50)"), +}); + +// Get Gmail MCP Server +const getGmailMcpServer = () => { + // Server implementation + const server = new Server({ + name: "gmail", + version: "1.0.0", + }, { + capabilities: { + tools: {}, + }, + }); + + // Tool handlers + server.setRequestHandler(ListToolsRequestSchema, async () => ({ + tools: [ + { + name: "gmail_send_email", + description: "Sends a new email. You can use gmail_search_contacts tool to find contact emails. You MUST NOT assume the emails unless they are explicitly provided.", + inputSchema: zodToJsonSchema(SendEmailSchema), + annotations: { category: "GMAIL_EMAIL" }, + }, + { + name: "gmail_draft_email", + description: "Draft a new email", + inputSchema: zodToJsonSchema(SendEmailSchema), + annotations: { category: "GMAIL_EMAIL" }, + }, + { + name: "gmail_read_email", + description: "Retrieves the content of a specific email and all messages in its thread. Returns a structured list of emails with individual metadata (messageId, subject, from, to, date, body, attachments) for each message in the conversation.", + inputSchema: zodToJsonSchema(ReadEmailSchema), + annotations: { category: "GMAIL_EMAIL", readOnlyHint: true }, + }, + { + name: "gmail_search_emails", + description: "Searches for emails using Gmail search syntax", + inputSchema: zodToJsonSchema(SearchEmailsSchema), + annotations: { category: "GMAIL_EMAIL", readOnlyHint: true }, + }, + { + name: "gmail_modify_email", + description: "Modifies email labels (move to different folders)", + inputSchema: zodToJsonSchema(ModifyEmailSchema), + annotations: { category: "GMAIL_EMAIL" }, + }, + { + name: "gmail_delete_email", + description: "Permanently deletes an email", + inputSchema: zodToJsonSchema(DeleteEmailSchema), + annotations: { category: "GMAIL_EMAIL" }, + }, + { + name: "gmail_batch_modify_emails", + description: "Modifies labels for multiple emails in batches", + inputSchema: zodToJsonSchema(BatchModifyEmailsSchema), + annotations: { category: "GMAIL_BATCH_EMAIL" }, + }, + { + name: "gmail_batch_delete_emails", + description: "Permanently deletes multiple emails in batches", + inputSchema: zodToJsonSchema(BatchDeleteEmailsSchema), + annotations: { category: "GMAIL_BATCH_EMAIL" }, + }, + { + name: "gmail_get_email_attachments", + description: "Returns attachments for an email by message ID. Extracts and returns text for PDFs, Word (.docx), and Excel (.xlsx); returns inline text for text/JSON/XML; returns base64 for images/audio; otherwise returns a data URI reference.", + inputSchema: zodToJsonSchema(GetEmailAttachmentsSchema), + annotations: { category: "GMAIL_EMAIL", readOnlyHint: true }, + }, + { + name: "gmail_search_contacts", + description: "Search for contacts when you need to know the contact details. Supports searching personal contacts, other contact sources, domain directory, or all sources simultaneously. When contactType is 'all' (default), returns three separate result sets (personal, other, directory) each with independent pagination tokens for flexible paginated access to individual sources.", + inputSchema: zodToJsonSchema(SearchContactsSchema), + annotations: { category: "GMAIL_CONTACTS", readOnlyHint: true }, + }, + ], + })); + + server.setRequestHandler(CallToolRequestSchema, async (request) => { + const { name, arguments: args } = request.params; + const gmail = getGmailClient(); + + async function handleEmailAction(action: "send" | "draft", validatedArgs: any) { + const message = createEmailMessage(validatedArgs); + + const encodedMessage = Buffer.from(message).toString('base64') + .replace(/\+/g, '-') + .replace(/\//g, '_') + .replace(/=+$/, ''); + + // Define the type for messageRequest + interface GmailMessageRequest { + raw: string; + threadId?: string; + } + + const messageRequest: GmailMessageRequest = { + raw: encodedMessage, + }; + + // Add threadId if specified + if (validatedArgs.threadId) { + messageRequest.threadId = validatedArgs.threadId; + } + + if (action === "send") { + const response = await gmail.users.messages.send({ + userId: 'me', + requestBody: messageRequest, + }); + const resultPayload = { + message: `Email sent successfully with ID: ${response.data.id}`, + messageId: response.data.id ?? null, + }; + return { + content: [ + { + type: "text", + text: JSON.stringify(resultPayload, null, 2), + }, + ], + }; + } else { + const response = await gmail.users.drafts.create({ + userId: 'me', + requestBody: { + message: messageRequest, + }, + }); + const resultPayload = { + message: `Email draft created successfully with ID: ${response.data.id}`, + draftId: response.data.id ?? null, + }; + return { + content: [ + { + type: "text", + text: JSON.stringify(resultPayload, null, 2), + }, + ], + }; + } + } + + // Helper function to process operations in batches + async function processBatches( + items: T[], + batchSize: number, + processFn: (batch: T[]) => Promise + ): Promise<{ successes: U[], failures: { item: T, error: Error }[] }> { + const successes: U[] = []; + const failures: { item: T, error: Error }[] = []; + + // Process in batches + for (let i = 0; i < items.length; i += batchSize) { + const batch = items.slice(i, i + batchSize); + try { + const results = await processFn(batch); + successes.push(...results); + } catch (error) { + // If batch fails, try individual items + for (const item of batch) { + try { + const result = await processFn([item]); + successes.push(...result); + } catch (itemError) { + failures.push({ item, error: itemError as Error }); + } + } + } + } + + return { successes, failures }; + } + + try { + + switch (name) { + case "gmail_send_email": + case "gmail_draft_email": { + const validatedArgs = SendEmailSchema.parse(args); + const action = name === "gmail_send_email" ? "send" : "draft"; + return await handleEmailAction(action, validatedArgs); + } + + case "gmail_read_email": { + const validatedArgs = ReadEmailSchema.parse(args); + const response = await gmail.users.messages.get({ + userId: 'me', + id: validatedArgs.messageId, + format: 'full', + }); + + const threadId = response.data.threadId || ''; + + // Get all messages in the thread + const threadResponse = await gmail.users.threads.get({ + userId: 'me', + id: threadId, + format: 'full', + }); + + const threadMessages = threadResponse.data.messages || []; + + // Process each message in the thread + const emails = threadMessages.map((msg: any) => { + const headers = msg.payload?.headers || []; + const subject = headers.find((h: any) => h.name?.toLowerCase() === 'subject')?.value || ''; + const from = headers.find((h: any) => h.name?.toLowerCase() === 'from')?.value || ''; + const to = headers.find((h: any) => h.name?.toLowerCase() === 'to')?.value || ''; + const cc = headers.find((h: any) => h.name?.toLowerCase() === 'cc')?.value || ''; + const date = headers.find((h: any) => h.name?.toLowerCase() === 'date')?.value || ''; + const messageId = msg.id || ''; + + // Extract email content using the recursive function + const { text, html } = extractEmailContent(msg.payload as GmailMessagePart || {}); + + // Get attachment information + const attachments: EmailAttachment[] = []; + const processAttachmentParts = (part: GmailMessagePart, path: string = '') => { + if (part.body && part.body.attachmentId) { + const filename = part.filename || `attachment-${part.body.attachmentId}`; + attachments.push({ + id: part.body.attachmentId, + filename: filename, + mimeType: part.mimeType || 'application/octet-stream', + size: part.body.size || 0 + }); + } + + if (part.parts) { + part.parts.forEach((subpart: GmailMessagePart) => + processAttachmentParts(subpart, `${path}/parts`) + ); + } + }; + + if (msg.payload) { + processAttachmentParts(msg.payload as GmailMessagePart); + } + + const preferredFormat = text ? 'text/plain' : (html ? 'text/html' : null); + + return { + messageId, + subject, + from, + to, + cc: cc || undefined, + date, + body: { + text: text || '', + html: html || '', + preferredFormat, + }, + attachments: attachments.length > 0 ? attachments : undefined, + }; + }); + + const resultPayload = { + threadId, + messageCount: emails.length, + emails, + }; + + return { + content: [ + { + type: "text", + text: JSON.stringify(resultPayload, null, 2), + }, + ], + }; + } + + case "gmail_search_emails": { + const validatedArgs = SearchEmailsSchema.parse(args); + const response = await gmail.users.messages.list({ + userId: 'me', + q: validatedArgs.query, + maxResults: validatedArgs.maxResults || 10, + }); + + const messages = response.data.messages || []; + const results = await Promise.all( + messages.map(async (msg: any) => { + const detail = await gmail.users.messages.get({ + userId: 'me', + id: msg.id!, + format: 'metadata', + metadataHeaders: ['Subject', 'From', 'Date'], + }); + const headers = detail.data.payload?.headers || []; + return { + id: msg.id, + subject: headers.find((h: any) => h.name === 'Subject')?.value || '', + from: headers.find((h: any) => h.name === 'From')?.value || '', + date: headers.find((h: any) => h.name === 'Date')?.value || '', + }; + }) + ); + + return { + content: [ + { + type: "text", + text: JSON.stringify(results, null, 2), + }, + ], + }; + } + + case "gmail_modify_email": { + const validatedArgs = ModifyEmailSchema.parse(args); + + // Prepare request body + const requestBody: any = {}; + + if (validatedArgs.addLabelIds) { + requestBody.addLabelIds = validatedArgs.addLabelIds; + } + + if (validatedArgs.removeLabelIds) { + requestBody.removeLabelIds = validatedArgs.removeLabelIds; + } + + await gmail.users.messages.modify({ + userId: 'me', + id: validatedArgs.messageId, + requestBody: requestBody, + }); + const resultPayload: Record = { + message: `Email ${validatedArgs.messageId} labels updated successfully`, + messageId: validatedArgs.messageId, + }; + if (validatedArgs.addLabelIds) { + resultPayload.addedLabels = validatedArgs.addLabelIds; + } + if (validatedArgs.removeLabelIds) { + resultPayload.removedLabels = validatedArgs.removeLabelIds; + } + + return { + content: [ + { + type: "text", + text: JSON.stringify(resultPayload, null, 2), + }, + ], + }; + } + + case "gmail_delete_email": { + const validatedArgs = DeleteEmailSchema.parse(args); + await gmail.users.messages.delete({ + userId: 'me', + id: validatedArgs.messageId, + }); + const resultPayload = { + message: `Email ${validatedArgs.messageId} deleted successfully`, + messageId: validatedArgs.messageId, + }; + + return { + content: [ + { + type: "text", + text: JSON.stringify(resultPayload, null, 2), + }, + ], + }; + } + + case "gmail_batch_modify_emails": { + const validatedArgs = BatchModifyEmailsSchema.parse(args); + const messageIds = validatedArgs.messageIds; + const batchSize = validatedArgs.batchSize || 50; + + // Prepare request body + const requestBody: any = {}; + + if (validatedArgs.addLabelIds) { + requestBody.addLabelIds = validatedArgs.addLabelIds; + } + + if (validatedArgs.removeLabelIds) { + requestBody.removeLabelIds = validatedArgs.removeLabelIds; + } + + // Process messages in batches + const { successes, failures } = await processBatches( + messageIds, + batchSize, + async (batch) => { + const results = await Promise.all( + batch.map(async (messageId) => { + const result = await gmail.users.messages.modify({ + userId: 'me', + id: messageId, + requestBody: requestBody, + }); + return { messageId, success: true }; + }) + ); + return results; + } + ); + + // Generate summary of the operation + const successCount = successes.length; + const failureCount = failures.length; + const failureDetails = failures.map(({ item, error }) => ({ + messageId: typeof item === 'string' ? item : String(item), + error: error.message, + })); + const resultPayload: Record = { + message: "Batch label modification complete.", + successCount, + failureCount, + }; + if (failureDetails.length > 0) { + resultPayload.failures = failureDetails; + } + + return { + content: [ + { + type: "text", + text: JSON.stringify(resultPayload, null, 2), + }, + ], + }; + } + + case "gmail_batch_delete_emails": { + const validatedArgs = BatchDeleteEmailsSchema.parse(args); + const messageIds = validatedArgs.messageIds; + const batchSize = validatedArgs.batchSize || 50; + + // Process messages in batches + const { successes, failures } = await processBatches( + messageIds, + batchSize, + async (batch) => { + const results = await Promise.all( + batch.map(async (messageId) => { + await gmail.users.messages.delete({ + userId: 'me', + id: messageId, + }); + return { messageId, success: true }; + }) + ); + return results; + } + ); + + // Generate summary of the operation + const successCount = successes.length; + const failureCount = failures.length; + const failureDetails = failures.map(({ item, error }) => ({ + messageId: typeof item === 'string' ? item : String(item), + error: error.message, + })); + const resultPayload: Record = { + message: "Batch delete operation complete.", + successCount, + failureCount, + }; + if (failureDetails.length > 0) { + resultPayload.failures = failureDetails; + } + + return { + content: [ + { + type: "text", + text: JSON.stringify(resultPayload, null, 2), + }, + ], + }; + } + + case "gmail_get_email_attachments": { + const validatedArgs = GetEmailAttachmentsSchema.parse(args); + const messageId = validatedArgs.messageId; + + + // Get the message in full to inspect parts and attachment IDs + const messageResponse = await gmail.users.messages.get({ + userId: 'me', + id: messageId, + format: 'full', + }); + + const attachmentsMeta: Array<{ + attachmentId: string; + filename: string; + mimeType: string; + size: number; + }> = []; + + const collectAttachments = (part: GmailMessagePart | undefined) => { + if (!part) return; + if (part.body && part.body.attachmentId) { + attachmentsMeta.push({ + attachmentId: part.body.attachmentId, + filename: part.filename || `attachment-${part.body.attachmentId}`, + mimeType: part.mimeType || 'application/octet-stream', + size: part.body.size || 0, + }); + } + if (part.parts && part.parts.length) { + part.parts.forEach(collectAttachments); + } + }; + + collectAttachments(messageResponse.data.payload as GmailMessagePart | undefined); + + + if (attachmentsMeta.length === 0) { + const resultPayload = { + message: `No attachments found for message ${messageId}`, + messageId, + attachmentCount: 0, + }; + return { + content: [ + { + type: 'text', + text: JSON.stringify(resultPayload, null, 2), + }, + ], + }; + } + + // Retrieve each attachment's data and emit real content + const attachmentContents = await Promise.all( + attachmentsMeta.map(async (meta) => { + const att = await gmail.users.messages.attachments.get({ + userId: 'me', + messageId, + id: meta.attachmentId, + }); + const base64Url = att.data.data || ''; + const base64 = base64UrlToBase64(base64Url); + + const mime = meta.mimeType || 'application/octet-stream'; + const commonMeta = { + attachmentId: meta.attachmentId, + filename: meta.filename, + mimeType: mime, + size: meta.size, + }; + const asJsonText = (extra: Record) => ({ + type: 'text' as const, + text: JSON.stringify( + { + ...commonMeta, + ...extra, + }, + null, + 2 + ), + }); + + // Handle PDF files using pdf-parse + if (mime === 'application/pdf' || meta.filename.toLowerCase().endsWith('.pdf')) { + const pdfText = await extractPdfText(base64, meta.filename); + + return { + type: 'text' as const, + text: pdfText, + }; + } + + // Handle Word DOCX files using mammoth + if (mime === 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' || meta.filename.toLowerCase().endsWith('.docx')) { + const docxText = await extractDocxText(base64, meta.filename); + return { + type: 'text' as const, + text: docxText, + }; + } + + // Handle Excel XLSX files using exceljs; legacy .xls is not supported + if ( + mime === 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' || + meta.filename.toLowerCase().endsWith('.xlsx') + ) { + const xlsxText = await extractXlsxText(base64, meta.filename); + return { + type: 'text' as const, + text: xlsxText, + }; + } + if ( + mime === 'application/vnd.ms-excel' || + meta.filename.toLowerCase().endsWith('.xls') + ) { + return { + type: 'text' as const, + text: `[Info] Attachment ${meta.filename}: legacy .xls format is not supported for text extraction. Please convert to .xlsx and retry.`, + }; + } + + if (mime.startsWith('text/') || + ['application/json', 'application/xml', 'application/javascript', 'application/typescript'].includes(mime)) { + const text = Buffer.from(base64, 'base64').toString('utf8'); + return { + type: 'text' as const, + text, + }; + } + + if (mime.startsWith('image/')) { + return { + type: 'image' as const, + data: base64, + mimeType: mime, + name: meta.filename, + }; + } + + if (mime.startsWith('audio/')) { + return { + type: 'audio' as const, + data: base64, + mimeType: mime, + name: meta.filename, + }; + } + + // Fallback for other binaries: return a data URI reference in text + const dataUri = `data:${mime};base64,${base64}`; + return { + type: 'text' as const, + text: `Attachment: ${meta.filename} (${mime}, ${meta.size} bytes)\n${dataUri}`, + }; + }) + ); + + // Optionally prepend a short summary line + const summary = { + type: 'text' as const, + text: JSON.stringify( + { + message: `Attachments for message ${messageId}`, + messageId, + attachmentCount: attachmentsMeta.length, + attachments: attachmentsMeta, + }, + null, + 2 + ), + }; + + return { + content: [summary, ...attachmentContents], + }; + } + + case "gmail_search_contacts": { + const validatedArgs = SearchContactsSchema.parse(args); + const peopleClient = getPeopleClient(); + const contactType = validatedArgs.contactType || 'personal'; + + try { + let response: any; + let results: any[] = []; + let typeLabel = ''; + + if (contactType === 'all') { + typeLabel = 'contact(s) from all sources'; + // Send warmup requests for personal and other contacts + await Promise.all([ + warmupContactSearch(peopleClient, 'personal'), + warmupContactSearch(peopleClient, 'other'), + ]); + // Execute all three searches in parallel + const [personalRes, otherRes, directoryRes] = await Promise.all([ + // Personal contacts + peopleClient.people.searchContacts({ + query: validatedArgs.query, + pageSize: Math.min(validatedArgs.pageSize || 10, 30), + readMask: 'names,emailAddresses,organizations,phoneNumbers,metadata', + }), + // Other contacts + peopleClient.otherContacts.search({ + query: validatedArgs.query, + pageSize: Math.min(validatedArgs.pageSize || 10, 30), + readMask: 'emailAddresses,metadata,names,phoneNumbers', + }), + // Directory contacts + peopleClient.people.searchDirectoryPeople({ + query: validatedArgs.query, + pageSize: Math.min(validatedArgs.pageSize || 10, 500), + readMask: 'names,emailAddresses,organizations,phoneNumbers,metadata', + sources: ['DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE', 'DIRECTORY_SOURCE_TYPE_DOMAIN_CONTACT'], + }), + ]); + + // Process personal results + const personalResults = (personalRes.data.results || []).map((result: any) => { + const person = result.person || {}; + const names = person.names || []; + const emails = person.emailAddresses || []; + const phones = person.phoneNumbers || []; + const orgs = person.organizations || []; + + return { + resourceName: person.resourceName, + displayName: names.length > 0 ? names[0].displayName : 'Unknown', + firstName: names.length > 0 ? names[0].givenName : '', + lastName: names.length > 0 ? names[0].familyName : '', + contactType: 'personal', + emailAddresses: emails.map((e: any) => ({ + email: e.value, + type: e.type || 'other', + })), + phoneNumbers: phones.map((p: any) => ({ + number: p.value, + type: p.type || 'other', + })), + organizations: orgs.map((o: any) => ({ + name: o.name, + title: o.title, + })), + }; + }); + + // Process other results + const otherResults = (otherRes.data.results || []).map((result: any) => { + const person = result.person || {}; + const names = person.names || []; + const emails = person.emailAddresses || []; + const phones = person.phoneNumbers || []; + + return { + resourceName: person.resourceName, + displayName: names.length > 0 ? names[0].displayName : 'Unknown', + firstName: names.length > 0 ? names[0].givenName : '', + lastName: names.length > 0 ? names[0].familyName : '', + contactType: 'other', + emailAddresses: emails.map((e: any) => ({ + email: e.value, + type: e.type || 'other', + })), + phoneNumbers: phones.map((p: any) => ({ + number: p.value, + type: p.type || 'other', + })), + organizations: [], + }; + }); + + // Process directory results + const directoryResults = (directoryRes.data.people || []).map((person: any) => { + const names = person.names || []; + const emails = person.emailAddresses || []; + const phones = person.phoneNumbers || []; + const orgs = person.organizations || []; + + return { + resourceName: person.resourceName, + displayName: names.length > 0 ? names[0].displayName : 'Unknown', + firstName: names.length > 0 ? names[0].givenName : '', + lastName: names.length > 0 ? names[0].familyName : '', + contactType: 'directory', + emailAddresses: emails.map((e: any) => ({ + email: e.value, + type: e.type || 'work', + })), + phoneNumbers: phones.map((p: any) => ({ + number: p.value, + type: p.type || 'work', + })), + organizations: orgs.map((o: any) => ({ + name: o.name, + title: o.title, + })), + }; + }); + + // Return three independent result sets with pagination info + const resultPayload = { + message: `Found contacts matching "${validatedArgs.query}" from all sources`, + query: validatedArgs.query, + contactType: 'all', + personal: { + resultCount: personalResults.length, + nextPageToken: (personalRes.data as any).nextPageToken || undefined, + contacts: personalResults, + }, + other: { + resultCount: otherResults.length, + nextPageToken: (otherRes.data as any).nextPageToken || undefined, + contacts: otherResults, + }, + directory: { + resultCount: directoryResults.length, + nextPageToken: (directoryRes.data as any).nextPageToken || undefined, + contacts: directoryResults, + }, + }; + + return { + content: [ + { + type: "text", + text: JSON.stringify(resultPayload, null, 2), + }, + ], + }; + + } else if (contactType === 'personal') { + typeLabel = 'personal contact(s)'; + // Send warmup request before search + await warmupContactSearch(peopleClient, 'personal'); + response = await peopleClient.people.searchContacts({ + query: validatedArgs.query, + pageSize: Math.min(validatedArgs.pageSize || 10, 30), + readMask: 'names,emailAddresses,organizations,phoneNumbers,metadata', + }); + + results = (response.data.results || []).map((result: any) => { + const person = result.person || {}; + const names = person.names || []; + const emails = person.emailAddresses || []; + const phones = person.phoneNumbers || []; + const orgs = person.organizations || []; + + return { + resourceName: person.resourceName, + displayName: names.length > 0 ? names[0].displayName : 'Unknown', + firstName: names.length > 0 ? names[0].givenName : '', + lastName: names.length > 0 ? names[0].familyName : '', + emailAddresses: emails.map((e: any) => ({ + email: e.value, + type: e.type || 'other', + })), + phoneNumbers: phones.map((p: any) => ({ + number: p.value, + type: p.type || 'other', + })), + organizations: orgs.map((o: any) => ({ + name: o.name, + title: o.title, + })), + }; + }); + } else if (contactType === 'other') { + typeLabel = 'other contact(s)'; + // Send warmup request before search + await warmupContactSearch(peopleClient, 'other'); + response = await peopleClient.otherContacts.search({ + query: validatedArgs.query, + pageSize: Math.min(validatedArgs.pageSize || 10, 30), + readMask: 'emailAddresses,metadata,names,phoneNumbers', + }); + + results = (response.data.results || []).map((result: any) => { + const person = result.person || {}; + const names = person.names || []; + const emails = person.emailAddresses || []; + const phones = person.phoneNumbers || []; + + return { + resourceName: person.resourceName, + displayName: names.length > 0 ? names[0].displayName : 'Unknown', + firstName: names.length > 0 ? names[0].givenName : '', + lastName: names.length > 0 ? names[0].familyName : '', + emailAddresses: emails.map((e: any) => ({ + email: e.value, + type: e.type || 'other', + })), + phoneNumbers: phones.map((p: any) => ({ + number: p.value, + type: p.type || 'other', + })), + organizations: [], + }; + }); + } else if (contactType === 'directory') { + typeLabel = 'directory contact(s)'; + const sourceMap: { [key: string]: string[] } = { + 'UNSPECIFIED': ['DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE', 'DIRECTORY_SOURCE_TYPE_DOMAIN_CONTACT'], + 'DOMAIN_DIRECTORY': ['DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE'], + 'DOMAIN_CONTACTS': ['DIRECTORY_SOURCE_TYPE_DOMAIN_CONTACT'], + }; + const directorySources = sourceMap[validatedArgs.directorySources || 'UNSPECIFIED']; + + response = await peopleClient.people.searchDirectoryPeople({ + query: validatedArgs.query, + pageSize: Math.min(validatedArgs.pageSize || 10, 500), + readMask: 'names,emailAddresses,organizations,phoneNumbers,metadata', + sources: directorySources, + pageToken: validatedArgs.pageToken, + }); + + results = (response.data.people || []).map((person: any) => { + const names = person.names || []; + const emails = person.emailAddresses || []; + const phones = person.phoneNumbers || []; + const orgs = person.organizations || []; + + return { + resourceName: person.resourceName, + displayName: names.length > 0 ? names[0].displayName : 'Unknown', + firstName: names.length > 0 ? names[0].givenName : '', + lastName: names.length > 0 ? names[0].familyName : '', + emailAddresses: emails.map((e: any) => ({ + email: e.value, + type: e.type || 'work', + })), + phoneNumbers: phones.map((p: any) => ({ + number: p.value, + type: p.type || 'work', + })), + organizations: orgs.map((o: any) => ({ + name: o.name, + title: o.title, + })), + }; + }); + } + + const resultPayload = { + message: `Found ${results.length} ${typeLabel} matching "${validatedArgs.query}"`, + query: validatedArgs.query, + contactType: contactType, + resultCount: results.length, + contacts: results, + }; + + return { + content: [ + { + type: "text", + text: JSON.stringify(resultPayload, null, 2), + }, + ], + }; + } catch (error: any) { + throw new Error(`Failed to search ${contactType} contacts: ${error.message}`); + } + } + + default: + throw new Error(`Unknown tool: ${name}`); + } + } catch (error: any) { + const errorPayload = { error: error.message }; + return { + content: [ + { + type: "text", + text: JSON.stringify(errorPayload, null, 2), + }, + ], + }; + } + }); + + return server; +}; + +// Create Express App +const app = express(); + +//============================================================================= +// STREAMABLE HTTP TRANSPORT (PROTOCOL VERSION 2025-03-26) +//============================================================================= + +app.post('/mcp', async (req: Request, res: Response) => { + const accessToken = extractAccessToken(req); + + // Initialize Gmail and People clients with the access token + const auth = new google.auth.OAuth2(); + auth.setCredentials({ access_token: accessToken }); + const gmailClient = google.gmail({ version: 'v1', auth }); + const peopleClient = google.people({ version: 'v1', auth }); + + const server = getGmailMcpServer(); + try { + const transport: StreamableHTTPServerTransport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + await server.connect(transport); + asyncLocalStorage.run({ gmailClient, peopleClient }, async () => { + await transport.handleRequest(req, res, req.body); + }); + res.on('close', () => { + console.log('Request closed'); + transport.close(); + server.close(); + }); + } catch (error) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', + }, + id: null, + }); + } + } +}); + +app.get('/mcp', async (req: Request, res: Response) => { + console.log('Received GET MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +app.delete('/mcp', async (req: Request, res: Response) => { + console.log('Received DELETE MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +//============================================================================= +// DEPRECATED HTTP+SSE TRANSPORT (PROTOCOL VERSION 2024-11-05) +//============================================================================= + +// Map to store SSE transports +const transports = new Map(); + +app.get("/sse", async (req: Request, res: Response) => { + const accessToken = extractAccessToken(req); + + const transport = new SSEServerTransport(`/messages`, res); + + // Set up cleanup when connection closes + res.on('close', async () => { + console.log(`SSE connection closed for transport: ${transport.sessionId}`); + try { + transports.delete(transport.sessionId); + } finally { + } + }); + + transports.set(transport.sessionId, transport); + + const server = getGmailMcpServer(); + await server.connect(transport); + + console.log(`SSE connection established with transport: ${transport.sessionId}`); +}); + +app.post("/messages", async (req: Request, res: Response) => { + const sessionId = req.query.sessionId as string; + const accessToken = extractAccessToken(req); + + let transport: SSEServerTransport | undefined; + transport = sessionId ? transports.get(sessionId) : undefined; + if (transport) { + // Initialize Gmail and People clients with the access token + const auth = new google.auth.OAuth2(); + auth.setCredentials({ access_token: accessToken }); + const gmailClient = google.gmail({ version: 'v1', auth }); + const peopleClient = google.people({ version: 'v1', auth }); + + asyncLocalStorage.run({ gmailClient, peopleClient }, async () => { + await transport!.handlePostMessage(req, res); + }); + } else { + console.error(`Transport not found for session ID: ${sessionId}`); + res.status(404).send({ error: "Transport not found" }); + } +}); + +// Start the server +const PORT = process.env.PORT || 5000; +app.listen(PORT, () => { + console.log(`Server running on port ${PORT}`); +}); + diff --git a/mcp_servers/gmail/src/utl.ts b/mcp_servers/gmail/src/utl.ts new file mode 100644 index 00000000..fdaf566f --- /dev/null +++ b/mcp_servers/gmail/src/utl.ts @@ -0,0 +1,207 @@ +/** + * Helper function to encode email headers containing non-ASCII characters + * according to RFC 2047 MIME specification + */ +function encodeEmailHeader(text: string): string { + // Only encode if the text contains non-ASCII characters + if (/[^\x00-\x7F]/.test(text)) { + // Use MIME Words encoding (RFC 2047) + return '=?UTF-8?B?' + Buffer.from(text).toString('base64') + '?='; + } + return text; +} + +export const validateEmail = (email: string): boolean => { + const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/; + return emailRegex.test(email); +}; + +export function createEmailMessage(validatedArgs: any): string { + const encodedSubject = encodeEmailHeader(validatedArgs.subject); + // Determine content type based on available content and explicit mimeType + let mimeType = validatedArgs.mimeType || 'text/plain'; + + // If htmlBody is provided and mimeType isn't explicitly set to text/plain, + // use multipart/alternative to include both versions + if (validatedArgs.htmlBody && mimeType !== 'text/plain') { + mimeType = 'multipart/alternative'; + } + + // Generate a random boundary string for multipart messages + const boundary = `----=_NextPart_${Math.random().toString(36).substring(2)}`; + + // Validate email addresses + (validatedArgs.to as string[]).forEach(email => { + if (!validateEmail(email)) { + throw new Error(`Recipient email address is invalid: ${email}`); + } + }); + + // Common email headers + const emailParts = [ + 'From: me', + `To: ${validatedArgs.to.join(', ')}`, + validatedArgs.cc ? `Cc: ${validatedArgs.cc.join(', ')}` : '', + validatedArgs.bcc ? `Bcc: ${validatedArgs.bcc.join(', ')}` : '', + `Subject: ${encodedSubject}`, + // Add thread-related headers if specified + validatedArgs.inReplyTo ? `In-Reply-To: ${validatedArgs.inReplyTo}` : '', + validatedArgs.inReplyTo ? `References: ${validatedArgs.inReplyTo}` : '', + 'MIME-Version: 1.0', + ].filter(Boolean); + + // Construct the email based on the content type + if (mimeType === 'multipart/alternative') { + // Multipart email with both plain text and HTML + emailParts.push(`Content-Type: multipart/alternative; boundary="${boundary}"`); + emailParts.push(''); + + // Plain text part + emailParts.push(`--${boundary}`); + emailParts.push('Content-Type: text/plain; charset=UTF-8'); + emailParts.push('Content-Transfer-Encoding: 7bit'); + emailParts.push(''); + emailParts.push(validatedArgs.body); + emailParts.push(''); + + // HTML part + emailParts.push(`--${boundary}`); + emailParts.push('Content-Type: text/html; charset=UTF-8'); + emailParts.push('Content-Transfer-Encoding: 7bit'); + emailParts.push(''); + emailParts.push(validatedArgs.htmlBody || validatedArgs.body); // Use body as fallback + emailParts.push(''); + + // Close the boundary + emailParts.push(`--${boundary}--`); + } else if (mimeType === 'text/html') { + // HTML-only email + emailParts.push('Content-Type: text/html; charset=UTF-8'); + emailParts.push('Content-Transfer-Encoding: 7bit'); + emailParts.push(''); + emailParts.push(validatedArgs.htmlBody || validatedArgs.body); + } else { + // Plain text email (default) + emailParts.push('Content-Type: text/plain; charset=UTF-8'); + emailParts.push('Content-Transfer-Encoding: 7bit'); + emailParts.push(''); + emailParts.push(validatedArgs.body); + } + + return emailParts.join('\r\n'); +} + +/** + * Extracts text content from a PDF file encoded in base64 + * @param base64Data - The base64 encoded PDF data + * @param filename - The filename for better error messages + * @returns The extracted text content or an error message + */ +export async function extractPdfText(base64Data: string, filename: string): Promise { + try { + // Dynamically import internal implementation to avoid debug harness in index.js + // @ts-ignore - no type declarations for internal path + const pdfParse = (await import('pdf-parse/lib/pdf-parse.js')).default as any; + + // Convert base64 to buffer + const buffer = Buffer.from(base64Data, 'base64'); + + // Parse PDF and extract text + const data = await pdfParse(buffer); + + // Return extracted text with metadata + const result = [ + `=== PDF Content from ${filename} ===`, + `Pages: ${data.numpages}`, + ``, + `--- Text Content ---`, + data.text, + ``, + `=== End of PDF Content ===` + ].join('\n'); + + return result; + } catch (error) { + console.error(`Error extracting text from PDF ${filename}:`, error); + return `[Error: Unable to extract text from PDF "${filename}". The file may be corrupted, password-protected, or contain only images without text.]`; + } +} + +/** + * Extracts text content from a DOCX Word document encoded in base64 + * Uses the mammoth library to extract raw text + */ +export async function extractDocxText(base64Data: string, filename: string): Promise { + try { + // Dynamically import to avoid loading cost unless needed + const mammoth = await import('mammoth'); + + const buffer = Buffer.from(base64Data, 'base64'); + const result = await mammoth.extractRawText({ buffer }); + + const messages = (result.messages || []).map((m: any) => `- ${m.message || m.value || JSON.stringify(m)}`).join('\n'); + const text = result.value || ''; + + return [ + `=== Word (DOCX) Content from ${filename} ===`, + messages ? `Messages:\n${messages}\n` : '', + `--- Text Content ---`, + text, + '', + `=== End of Word Content ===` + ].filter(Boolean).join('\n'); + } catch (error) { + console.error(`Error extracting text from DOCX ${filename}:`, error); + return `[Error: Unable to extract text from Word file "${filename}". Ensure it is a .docx file. Legacy .doc format is not supported by mammoth.]`; + } +} + +/** + * Extracts text/CSV-like content from an Excel (.xlsx) file encoded in base64 + * Uses exceljs (actively maintained) and intentionally does not process legacy .xls + */ +export async function extractXlsxText(base64Data: string, filename: string): Promise { + try { + const ExcelJSImport = await import('exceljs'); + const ExcelJS: any = (ExcelJSImport as any).default ?? ExcelJSImport; + const buffer = Buffer.from(base64Data, 'base64'); + + const workbook = new ExcelJS.Workbook(); + await workbook.xlsx.load(buffer); + + const sheetTexts: string[] = []; + workbook.worksheets.forEach((worksheet: any) => { + const rowsOut: string[] = []; + worksheet.eachRow({ includeEmpty: false }, (row: any) => { + const values: any[] = Array.isArray(row.values) ? row.values.slice(1) : []; + const cells = values.map((v) => { + if (v === null || v === undefined) return ''; + if (typeof v === 'object') { + // Cell objects can be rich text, formulas, etc. + if (typeof v.text === 'string') return v.text; + if (typeof v.result !== 'undefined') return String(v.result); + if (typeof v.richText !== 'undefined') { + try { return v.richText.map((rt: any) => rt.text).join(''); } catch { return ''; } + } + return String(v.toString?.() ?? ''); + } + return String(v); + }); + rowsOut.push(cells.join(',')); + }); + sheetTexts.push([ + `Sheet: ${worksheet.name}`, + rowsOut.join('\n') + ].join('\n')); + }); + + return [ + `=== Excel Content from ${filename} ===`, + ...sheetTexts, + `=== End of Excel Content ===` + ].join('\n\n'); + } catch (error) { + console.error(`Error extracting text from Excel ${filename}:`, error); + return `[Error: Unable to extract content from Excel file "${filename}". The file may be corrupted or in an unsupported format.]`; + } +} \ No newline at end of file diff --git a/mcp_servers/gmail/tsconfig.json b/mcp_servers/gmail/tsconfig.json new file mode 100644 index 00000000..f14c1b0c --- /dev/null +++ b/mcp_servers/gmail/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "jsx": "react-jsx", + "outDir": "./build", + "rootDir": ".", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true + }, + "include": ["./**/*"], + "exclude": ["node_modules"] +} diff --git a/mcp_servers/gong/Dockerfile b/mcp_servers/gong/Dockerfile new file mode 100644 index 00000000..880b9204 --- /dev/null +++ b/mcp_servers/gong/Dockerfile @@ -0,0 +1,23 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Install Python requirements +COPY mcp_servers/gong/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application source +COPY mcp_servers/gong/server.py ./ +COPY mcp_servers/gong/tools/ ./tools/ + +# Optionally copy example env file (not required for runtime) +# COPY mcp_servers/gong/.env.example .env + +EXPOSE 5000 + +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/gong/README.md b/mcp_servers/gong/README.md new file mode 100644 index 00000000..62ae2cfb --- /dev/null +++ b/mcp_servers/gong/README.md @@ -0,0 +1,78 @@ +# Gong MCP Server + +A Model Context Protocol (MCP) server for Gong integration. Access sales call analytics, conversation intelligence, and revenue insights using Gong's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Gong with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("GONG", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/gong-mcp-server:latest + + +# Run Gong MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/gong-mcp-server:latest + + +# Run Gong MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_gong_access_token_here"}' \ + ghcr.io/klavis-ai/gong-mcp-server:latest +``` + +**OAuth Setup:** Gong requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Call Analytics**: Access call recordings and conversation analytics +- **Revenue Intelligence**: Get insights into sales performance and pipeline +- **Deal Analysis**: Analyze deal progression and win/loss factors +- **Rep Performance**: Track sales rep performance and coaching opportunities +- **Market Intelligence**: Access competitive and market insights + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/gong/requirements.txt b/mcp_servers/gong/requirements.txt new file mode 100644 index 00000000..7eda28a2 --- /dev/null +++ b/mcp_servers/gong/requirements.txt @@ -0,0 +1,10 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +requests +httpx +click +starlette \ No newline at end of file diff --git a/mcp_servers/gong/server.py b/mcp_servers/gong/server.py new file mode 100644 index 00000000..dcfa2eaa --- /dev/null +++ b/mcp_servers/gong/server.py @@ -0,0 +1,296 @@ +import contextlib +import base64 +import json +import logging +import os +from collections.abc import AsyncIterator +from typing import Any + +import click +import mcp.types as types +from dotenv import load_dotenv +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +from tools import ( + auth_token_context, + extract_access_token, + get_transcripts_by_user, + get_call_transcripts, + get_extensive_data, + list_calls, + add_new_call, +) + +logger = logging.getLogger(__name__) + +load_dotenv() + +GONG_MCP_SERVER_PORT = int(os.getenv("GONG_MCP_SERVER_PORT", "5000")) + +@click.command() +@click.option("--port", default=GONG_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option("--log-level", default="INFO", help="Logging level") +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main(port: int, log_level: str, json_response: bool) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + app = Server("gong-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="gong_get_transcripts_by_user", + description=( + "Get call transcripts associated with a user by email address " + "including all participants on the call and their companies." + ), + inputSchema={ + "type": "object", + "required": ["user_email"], + "properties": { + "user_email": { + "type": "string", + "description": "Email address of the user.", + }, + "from_date": { + "type": "string", + "description": "ISO start datetime to filter calls (optional).", + }, + "to_date": { + "type": "string", + "description": "ISO end datetime to filter calls (optional).", + }, + "limit": { + "type": "integer", + "description": "Maximum number of calls to return (default 10).", + "default": 10, + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "GONG_TRANSCRIPT", "readOnlyHint": True}), + ), + types.Tool( + name="gong_get_extensive_data", + description="Lists detailed call data for specific call IDs (Gong /v2/calls/extensive).", + inputSchema={ + "type": "object", + "required": ["call_ids"], + "properties": { + "call_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "Array of Gong call IDs (max 100).", + }, + "cursor": { + "type": "string", + "description": "Pagination cursor returned by previous request (optional).", + }, + "include_parties": { + "type": "boolean", + "description": "Include parties section (default true).", + "default": True, + }, + "include_transcript": { + "type": "boolean", + "description": "Include transcript in response (default false).", + "default": False, + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "GONG_CALL", "readOnlyHint": True}), + ), + types.Tool( + name="gong_get_call_transcripts", + description="Retrieve transcripts of specific calls (Gong /v2/calls/transcript).", + inputSchema={ + "type": "object", + "required": ["call_ids"], + "properties": { + "call_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "Array of Gong call IDs (max 100).", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "GONG_TRANSCRIPT", "readOnlyHint": True}), + ), + types.Tool( + name="gong_list_calls", + description="List calls within a date range (Gong /v2/calls).", + inputSchema={ + "type": "object", + "properties": { + "from_date": {"type": "string", "description": "ISO start datetime (optional)."}, + "to_date": {"type": "string", "description": "ISO end datetime (optional)."}, + "limit": {"type": "integer", "description": "Maximum calls to return (default 50).", "default": 50}, + }, + }, + annotations=types.ToolAnnotations(**{"category": "GONG_CALL", "readOnlyHint": True}), + ), + types.Tool( + name="gong_add_new_call", + description="Add a new call record to Gong (POST /v2/calls).", + inputSchema={ + "type": "object", + "required": ["call"], + "properties": { + "call": { + "type": "object", + "description": "Object containing Gong call payload as per API docs.", + } + }, + }, + annotations=types.ToolAnnotations(**{"category": "GONG_CALL"}), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + if name == "gong_get_transcripts_by_user": + user_email = arguments.get("user_email") + if not user_email: + return [ + types.TextContent( + type="text", + text="Error: 'user_email' argument is required.", + ) + ] + + limit = arguments.get("limit", 10) + from_date = arguments.get("from_date") + to_date = arguments.get("to_date") + try: + result = await get_transcripts_by_user( + user_email=user_email, + from_date=from_date, + to_date=to_date, + limit=limit, + ) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + except Exception as e: + logger.exception("Error executing Gong tool %s: %s", name, e) + return [types.TextContent(type="text", text=f"Error: {e}")] + + elif name == "gong_get_extensive_data": + call_ids = arguments.get("call_ids") + cursor = arguments.get("cursor") + include_parties = arguments.get("include_parties", True) + include_transcript = arguments.get("include_transcript", False) + try: + result = await get_extensive_data(call_ids, cursor, include_parties, include_transcript) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception("Error executing Gong tool %s: %s", name, e) + return [types.TextContent(type="text", text=f"Error: {e}")] + + elif name == "gong_get_call_transcripts": + call_ids = arguments.get("call_ids") + try: + result = await get_call_transcripts(call_ids) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception("Error executing Gong tool %s: %s", name, e) + return [types.TextContent(type="text", text=f"Error: {e}")] + + elif name == "gong_list_calls": + limit = arguments.get("limit", 50) + from_date = arguments.get("from_date") + to_date = arguments.get("to_date") + try: + result = await list_calls(from_date, to_date, limit) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception("Error executing Gong tool %s: %s", name, e) + return [types.TextContent(type="text", text=f"Error: {e}")] + + elif name == "gong_add_new_call": + call_body = arguments.get("call") + try: + result = await add_new_call(call_body) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception("Error executing Gong tool %s: %s", name, e) + return [types.TextContent(type="text", text=f"Error: {e}")] + + return [types.TextContent(type="text", text=f"Unknown tool: {name}")] + + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + auth_token = extract_access_token(request) + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse(request.scope, request.receive, request._send) as streams: + await app.run(streams[0], streams[1], app.create_initialization_options()) + finally: + auth_token_context.reset(token) + return Response() + + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http(scope: Scope, receive: Receive, send: Send) -> None: + logger.info("Handling StreamableHTTP request") + auth_token = extract_access_token(scope) + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + async with session_manager.run(): + logger.info("Gong MCP Server started") + try: + yield + finally: + logger.info("Gong MCP Server shutting down") + + starlette_app = Starlette( + debug=True, + routes=[ + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info("Server listening on port %s", port) + logger.info("SSE endpoint: http://localhost:%s/sse", port) + logger.info("StreamableHTTP endpoint: http://localhost:%s/mcp", port) + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/gong/tools/__init__.py b/mcp_servers/gong/tools/__init__.py new file mode 100644 index 00000000..72f55805 --- /dev/null +++ b/mcp_servers/gong/tools/__init__.py @@ -0,0 +1,15 @@ +from .base import auth_token_context, extract_access_token +from .transcripts import get_transcripts_by_user +from .extensive import get_extensive_data +from .calls import list_calls, add_new_call +from .transcripts import get_call_transcripts + +__all__ = [ + "auth_token_context", + "extract_access_token", + "get_transcripts_by_user", + "get_call_transcripts", + "get_extensive_data", + "list_calls", + "add_new_call", +] \ No newline at end of file diff --git a/mcp_servers/gong/tools/base.py b/mcp_servers/gong/tools/base.py new file mode 100644 index 00000000..bce2eab0 --- /dev/null +++ b/mcp_servers/gong/tools/base.py @@ -0,0 +1,79 @@ +import logging +import base64 +import json +import os +from typing import Any, Dict +from contextvars import ContextVar +import httpx + +# Configure logging +logger = logging.getLogger(__name__) + +GONG_API_ENDPOINT = "https://api.gong.io/" + +# Context variable to store the basic auth header for each request +# We expect the server to set this value from the incoming HTTP header "x-auth-token", +# where the value is already formatted as "Basic ". +# This mirrors the pattern used by the Linear server for easy re-use by callers. +auth_token_context: ContextVar[str] = ContextVar("auth_token") + +def extract_access_token(request_or_scope) -> str: + """Extract access token from AUTH_DATA env var or x-auth-token header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get("x-auth-token") + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b"x-auth-token") + if auth_data: + auth_data = auth_data.decode("utf-8") + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + # If not JSON, assume it's the raw token (for backward compatibility with x-auth-token) + return auth_data + +def get_auth_header() -> str: + """Return the Authorization header value stored in the context, or raise.""" + try: + return auth_token_context.get() + except LookupError: # pragma: no cover + raise RuntimeError("Authentication token not found in request context") + +def build_headers(extra: Dict[str, str] | None = None) -> Dict[str, str]: + """Helper to construct request headers with Authorization and JSON content type.""" + headers: Dict[str, str] = { + "Authorization": get_auth_header(), + "Content-Type": "application/json", + } + if extra: + headers.update(extra) + return headers + +async def get(path: str, params: Dict[str, Any] | None = None) -> Dict[str, Any]: + """Perform a GET request to the Gong API and return JSON.""" + url = f"{GONG_API_ENDPOINT}{path}" + async with httpx.AsyncClient() as client: + resp = await client.get(url, params=params, headers=build_headers()) + resp.raise_for_status() + return resp.json() + +async def post(path: str, json_body: Dict[str, Any]) -> Dict[str, Any]: + """Perform a POST request to the Gong API and return JSON.""" + url = f"{GONG_API_ENDPOINT}{path}" + async with httpx.AsyncClient() as client: + resp = await client.post(url, json=json_body, headers=build_headers()) + resp.raise_for_status() + return resp.json() \ No newline at end of file diff --git a/mcp_servers/gong/tools/calls.py b/mcp_servers/gong/tools/calls.py new file mode 100644 index 00000000..baa4437a --- /dev/null +++ b/mcp_servers/gong/tools/calls.py @@ -0,0 +1,43 @@ +import logging +from datetime import datetime, timezone, timedelta +from typing import Any, Dict, Optional, List + +from .base import get, post + +logger = logging.getLogger(__name__) + +async def list_calls( + from_date: Optional[str] = None, + to_date: Optional[str] = None, + limit: int = 50, +) -> Dict[str, Any]: + """List calls in Gong between two datetimes (inclusive). + + If dates are not provided, defaults to the last 30 days. + """ + if not from_date: + from_date = ( + datetime.now(timezone.utc) - timedelta(days=30) + ).isoformat(timespec="seconds") + if not to_date: + to_date = datetime.now(timezone.utc).isoformat(timespec="seconds") + + params = { + "fromDateTime": from_date, + "toDateTime": to_date, + "limit": limit, + } + logger.info("Listing calls from %s to %s (limit=%s)", from_date, to_date, limit) + return await get("/v2/calls", params=params) + +async def add_new_call(call_data: Dict[str, Any]) -> Dict[str, Any]: + """Add a new call record to Gong. + + The required structure of `call_data` is documented in the Gong API. At a minimum, + you typically need start/end timestamps, parties, and a downloadMediaUrl. + """ + if not call_data: + raise ValueError("call_data cannot be empty") + + logger.info("Adding new call to Gong") + return await post("/v2/calls", call_data) \ No newline at end of file diff --git a/mcp_servers/gong/tools/extensive.py b/mcp_servers/gong/tools/extensive.py new file mode 100644 index 00000000..829677b5 --- /dev/null +++ b/mcp_servers/gong/tools/extensive.py @@ -0,0 +1,51 @@ +import logging +from typing import Any, Dict, List, Optional + +from .base import post + +logger = logging.getLogger(__name__) + +async def get_extensive_data( + call_ids: List[str], + cursor: Optional[str] = None, + include_parties: bool = True, + include_transcript: bool = False, +) -> Dict[str, Any]: + """Retrieve extensive call data for one or more call IDs. + + Parameters + ---------- + call_ids : list[str] + List of Gong call IDs to fetch. + cursor : str, optional + Pagination cursor returned by a previous request. + include_parties : bool, optional + Whether to include party metadata in the response. + include_transcript : bool, optional + Whether to include transcript in the response (may be large). + """ + + if not call_ids: + raise ValueError("call_ids list cannot be empty") + + logger.info("Executing get_extensive_data for %s calls", len(call_ids)) + + exposed_fields: Dict[str, Any] = { + "content": {}, + } + if include_parties: + exposed_fields["parties"] = True + if include_transcript: + exposed_fields["content"]["transcript"] = True + + payload: Dict[str, Any] = { + "callIds": call_ids, + "contentSelector": { + "context": "Extended", + "exposedFields": exposed_fields, + }, + } + if cursor: + payload["cursor"] = cursor + + return await post("/v2/calls/extensive", payload) \ No newline at end of file diff --git a/mcp_servers/gong/tools/transcripts.py b/mcp_servers/gong/tools/transcripts.py new file mode 100644 index 00000000..ff586736 --- /dev/null +++ b/mcp_servers/gong/tools/transcripts.py @@ -0,0 +1,88 @@ +import logging +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional, List + +from .base import post + +logger = logging.getLogger(__name__) + +async def get_transcripts_by_user( + user_email: str, + from_date: Optional[str] = None, + to_date: Optional[str] = None, + limit: int = 10, +) -> Dict[str, Any]: + """Retrieve call transcripts for calls that involve the given user email. + + The function makes one request to the /v2/calls/extensive endpoint with a filter that + matches the provided email address in the parties list. For each call it asks Gong to + include both the transcript and the parties so that callers can determine which company + participants belong to. + + Parameters + ---------- + user_email : str + Email address of the user whose calls we want to fetch. + from_date : str, optional + ISO-8601 date-time string for the start of the time window. If omitted, defaults + to 30 days in the past. + to_date : str, optional + ISO-8601 date-time string for the end of the time window. If omitted, defaults + to now. + limit : int, optional + Maximum number of calls to return (Gong caps pagination at 100 per page). + """ + + logger.info( + "Executing Gong tool get_transcripts_by_user for %s (limit=%s)", + user_email, + limit, + ) + + if not from_date: + from_date = ( + datetime.now(timezone.utc) - timedelta(days=30) + ).isoformat(timespec="seconds") + if not to_date: + to_date = datetime.now(timezone.utc).isoformat(timespec="seconds") + + payload: Dict[str, Any] = { + "contentSelector": { + "context": "Extended", + "exposedFields": { + "content": {"transcript": True}, + "parties": True, + }, + }, + "filter": { + "fromDateTime": from_date, + "toDateTime": to_date, + "parties": { + "emailAddress": {"eq": user_email} + }, + }, + "limit": limit, + } + + response = await post("/v2/calls/extensive", payload) + + # The API returns { "calls": [...] }. Return the whole response for maximum flexibility. + return response + +async def get_call_transcripts(call_ids: List[str]) -> Dict[str, Any]: + """Retrieve transcripts for specific call IDs. + + Parameters + ---------- + call_ids : list[str] + Gong call IDs whose transcripts should be fetched (max 100 per API call). + """ + if not call_ids: + raise ValueError("call_ids list cannot be empty") + + payload = { + "callIds": call_ids, + } + + logger.info("Retrieving transcripts for %s calls", len(call_ids)) + return await post("/v2/calls/transcript", payload) \ No newline at end of file diff --git a/mcp_servers/google_calendar/.env.example b/mcp_servers/google_calendar/.env.example new file mode 100644 index 00000000..f289a222 --- /dev/null +++ b/mcp_servers/google_calendar/.env.example @@ -0,0 +1,2 @@ +# Port for the MCP server to listen on +GOOGLE_CALENDAR_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/google_calendar/Dockerfile b/mcp_servers/google_calendar/Dockerfile new file mode 100644 index 00000000..747c2aa4 --- /dev/null +++ b/mcp_servers/google_calendar/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/google_calendar/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the server code +COPY mcp_servers/google_calendar/server.py . + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/google_calendar/README.md b/mcp_servers/google_calendar/README.md new file mode 100644 index 00000000..8067a415 --- /dev/null +++ b/mcp_servers/google_calendar/README.md @@ -0,0 +1,78 @@ +# Google Calendar MCP Server + +A Model Context Protocol (MCP) server for Google Calendar integration. Manage events, calendars, and scheduling using Google Calendar API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Google Calendar with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("GOOGLE_CALENDAR", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/google-calendar-mcp-server:latest + + +# Run Google Calendar MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/google-calendar-mcp-server:latest + + +# Run Google Calendar MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_google_access_token_here"}' \ + ghcr.io/klavis-ai/google-calendar-mcp-server:latest +``` + +**OAuth Setup:** Google Calendar requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Event Management**: Create, read, update, and delete calendar events +- **Calendar Operations**: Manage multiple calendars and calendar settings +- **Scheduling**: Handle meeting scheduling and availability +- **Recurring Events**: Manage recurring events and series +- **Attendee Management**: Invite attendees and manage responses + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/google_calendar/requirements.txt b/mcp_servers/google_calendar/requirements.txt new file mode 100644 index 00000000..35a7d09a --- /dev/null +++ b/mcp_servers/google_calendar/requirements.txt @@ -0,0 +1,13 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +google-auth +google-auth-oauthlib +google-auth-httplib2 +google-api-python-client +click +starlette +packaging diff --git a/mcp_servers/google_calendar/server.py b/mcp_servers/google_calendar/server.py new file mode 100644 index 00000000..3fc47203 --- /dev/null +++ b/mcp_servers/google_calendar/server.py @@ -0,0 +1,1796 @@ +import contextlib +import base64 +import logging +import os +import json +import uuid +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar +from enum import Enum +from datetime import datetime, timedelta +from zoneinfo import ZoneInfo + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv +from google.oauth2.credentials import Credentials +from googleapiclient.discovery import build +from googleapiclient.errors import HttpError + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +GOOGLE_CALENDAR_MCP_SERVER_PORT = int(os.getenv("GOOGLE_CALENDAR_MCP_SERVER_PORT", "5000")) + +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +# Define enums that are referenced in context.py +class EventVisibility(Enum): + DEFAULT = "default" + PUBLIC = "public" + PRIVATE = "private" + +class SendUpdatesOptions(Enum): + ALL = "all" + EXTERNAL_ONLY = "externalOnly" + NONE = "none" + +# Error class for retryable errors +class RetryableToolError(Exception): + def __init__(self, message: str, additional_prompt_content: str = "", retry_after_ms: int = 1000, developer_message: str = ""): + super().__init__(message) + self.additional_prompt_content = additional_prompt_content + self.retry_after_ms = retry_after_ms + self.developer_message = developer_message + +def get_calendar_service(access_token: str): + """Create Google Calendar service with access token.""" + credentials = Credentials(token=access_token) + return build('calendar', 'v3', credentials=credentials) + +def get_people_service(access_token: str): + """Create Google People service with access token.""" + credentials = Credentials(token=access_token) + return build('people', 'v1', credentials=credentials) + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("Authentication token not found in request context") + +def parse_datetime(datetime_string: str, time_zone: str) -> datetime: + """Parse datetime string to datetime object with timezone.""" + try: + # Try to parse as ISO format + dt = datetime.fromisoformat(datetime_string.replace('Z', '+00:00')) + # Convert to specified timezone if not already timezone-aware + if dt.tzinfo is None: + tz = ZoneInfo(time_zone) + dt = dt.replace(tzinfo=tz) + return dt + except ValueError: + raise ValueError(f"Invalid datetime format: {datetime_string}") + +# Context class to mock the context.get_auth_token_or_empty() calls +class Context: + def get_auth_token_or_empty(self) -> str: + return get_auth_token() + +context = Context() + +async def list_calendars( + max_results: int = 10, + show_deleted: bool = False, + show_hidden: bool = False, + next_page_token: str | None = None, +) -> Dict[str, Any]: + """List all calendars accessible by the user.""" + logger.info(f"Executing tool: list_calendars with max_results: {max_results}") + try: + access_token = get_auth_token() + service = get_calendar_service(access_token) + + max_results = max(1, min(max_results, 250)) + calendars = ( + service.calendarList() + .list( + pageToken=next_page_token, + showDeleted=show_deleted, + showHidden=show_hidden, + maxResults=max_results, + ) + .execute() + ) + + items = calendars.get("items", []) + keys = ["description", "id", "summary", "timeZone"] + relevant_items = [{k: i.get(k) for k in keys if i.get(k)} for i in items] + return { + "next_page_token": calendars.get("nextPageToken"), + "num_calendars": len(relevant_items), + "calendars": relevant_items, + } + except HttpError as e: + logger.error(f"Google Calendar API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Calendar API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool list_calendars: {e}") + raise e + +async def create_event( + summary: str, + start_datetime: str, + end_datetime: str, + calendar_id: str = "primary", + description: str | None = None, + location: str | None = None, + visibility: str = "default", + attendees: list[str] | None = None, + send_updates: str = "all", + add_google_meet: bool = False, + recurrence: list[str] | None = None, +) -> Dict[str, Any]: + """Create a new event/meeting/sync/meetup in the specified calendar.""" + logger.info(f"Executing tool: create_event with summary: {summary}") + try: + access_token = get_auth_token() + service = get_calendar_service(access_token) + + # Get the calendar's time zone + calendar = service.calendars().get(calendarId=calendar_id).execute() + time_zone = calendar["timeZone"] + + # Parse datetime strings + start_dt = parse_datetime(start_datetime, time_zone) + end_dt = parse_datetime(end_datetime, time_zone) + + event: Dict[str, Any] = { + "summary": summary, + "description": description, + "location": location, + "start": {"dateTime": start_dt.isoformat(), "timeZone": time_zone}, + "end": {"dateTime": end_dt.isoformat(), "timeZone": time_zone}, + "visibility": visibility, + } + + if attendees: + event["attendees"] = [{"email": email} for email in attendees] + + # Add recurrence rule if provided + if recurrence: + event["recurrence"] = recurrence + + # Add Google Meet conference if requested + if add_google_meet: + event["conferenceData"] = { + "createRequest": { + "requestId": str(uuid.uuid4()), + "conferenceSolutionKey": { + "type": "hangoutsMeet" + } + } + } + + # Set conferenceDataVersion to 1 when creating conferences + conference_data_version = 1 if add_google_meet else 0 + + created_event = service.events().insert( + calendarId=calendar_id, + body=event, + sendUpdates=send_updates, + conferenceDataVersion=conference_data_version + ).execute() + return {"event": created_event} + except HttpError as e: + logger.error(f"Google Calendar API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Calendar API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool create_event: {e}") + raise e + +async def list_events( + min_end_datetime: str, + max_start_datetime: str, + calendar_id: str = "primary", + max_results: int = 10, +) -> Dict[str, Any]: + """List events from the specified calendar within the given datetime range.""" + logger.info(f"Executing tool: list_events from {min_end_datetime} to {max_start_datetime}") + try: + access_token = get_auth_token() + service = get_calendar_service(access_token) + + # Get the calendar's time zone + calendar = service.calendars().get(calendarId=calendar_id).execute() + time_zone = calendar["timeZone"] + + # Parse datetime strings + min_end_dt = parse_datetime(min_end_datetime, time_zone) + max_start_dt = parse_datetime(max_start_datetime, time_zone) + + if min_end_dt > max_start_dt: + min_end_dt, max_start_dt = max_start_dt, min_end_dt + + events_result = ( + service.events() + .list( + calendarId=calendar_id, + timeMin=min_end_dt.isoformat(), + timeMax=max_start_dt.isoformat(), + maxResults=max_results, + singleEvents=True, + orderBy="startTime", + ) + .execute() + ) + + items_keys = [ + "attachments", + "attendees", + "creator", + "description", + "end", + "eventType", + "htmlLink", + "id", + "location", + "organizer", + "recurrence", + "recurringEventId", + "start", + "summary", + "visibility", + ] + + events = [ + {key: event[key] for key in items_keys if key in event} + for event in events_result.get("items", []) + ] + + return {"events_count": len(events), "events": events} + except HttpError as e: + logger.error(f"Google Calendar API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Calendar API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool list_events: {e}") + raise e + +async def update_event( + event_id: str, + updated_start_datetime: str | None = None, + updated_end_datetime: str | None = None, + updated_summary: str | None = None, + updated_description: str | None = None, + updated_location: str | None = None, + updated_visibility: str | None = None, + attendees_to_add: list[str] | None = None, + attendees_to_remove: list[str] | None = None, + updated_recurrence: list[str] | None = None, + send_updates: str = "all", +) -> str: + """Update an existing event in the specified calendar with the provided details.""" + logger.info(f"Executing tool: update_event with event_id: {event_id}") + try: + access_token = get_auth_token() + service = get_calendar_service(access_token) + + calendar = service.calendars().get(calendarId="primary").execute() + time_zone = calendar["timeZone"] + + try: + event = service.events().get(calendarId="primary", eventId=event_id).execute() + except HttpError: + valid_events_with_id = ( + service.events() + .list( + calendarId="primary", + timeMin=(datetime.now() - timedelta(days=2)).isoformat(), + timeMax=(datetime.now() + timedelta(days=365)).isoformat(), + maxResults=50, + singleEvents=True, + orderBy="startTime", + ) + .execute() + ) + raise RuntimeError(f"Event with ID {event_id} not found. Available events: {valid_events_with_id}") + + update_fields = {} + + if updated_start_datetime: + update_fields["start"] = {"dateTime": updated_start_datetime, "timeZone": time_zone} + + if updated_end_datetime: + update_fields["end"] = {"dateTime": updated_end_datetime, "timeZone": time_zone} + + if updated_summary: + update_fields["summary"] = updated_summary + + if updated_description: + update_fields["description"] = updated_description + + if updated_location: + update_fields["location"] = updated_location + + if updated_visibility: + update_fields["visibility"] = updated_visibility + + if updated_recurrence is not None: + # If updated_recurrence is an empty list, remove recurrence (convert to single event) + # If it has values, update the recurrence rule + update_fields["recurrence"] = updated_recurrence + + event.update({k: v for k, v in update_fields.items() if v is not None}) + + if attendees_to_remove: + event["attendees"] = [ + attendee + for attendee in event.get("attendees", []) + if attendee.get("email", "").lower() + not in [email.lower() for email in attendees_to_remove] + ] + + if attendees_to_add: + existing_emails = { + attendee.get("email", "").lower() for attendee in event.get("attendees", []) + } + new_attendees = [ + {"email": email} + for email in attendees_to_add + if email.lower() not in existing_emails + ] + event["attendees"] = event.get("attendees", []) + new_attendees + + updated_event = ( + service.events() + .update( + calendarId="primary", + eventId=event_id, + sendUpdates=send_updates, + body=event, + ) + .execute() + ) + return ( + f"Event with ID {event_id} successfully updated at {updated_event['updated']}. " + f"View updated event at {updated_event['htmlLink']}" + ) + except HttpError as e: + logger.error(f"Google Calendar API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Calendar API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool update_event: {e}") + raise e + +async def add_attendees_to_event( + event_id: str, + attendees: list[str], + calendar_id: str = "primary", + send_updates: str = "all", +) -> str: + """Add attendees to an existing event in Google Calendar.""" + logger.info(f"Executing tool: add_attendees_to_event with event_id: {event_id}") + try: + access_token = get_auth_token() + service = get_calendar_service(access_token) + + # Get the existing event + try: + event = service.events().get(calendarId=calendar_id, eventId=event_id).execute() + except HttpError: + valid_events_with_id = ( + service.events() + .list( + calendarId=calendar_id, + timeMin=(datetime.now() - timedelta(days=2)).isoformat(), + timeMax=(datetime.now() + timedelta(days=365)).isoformat(), + maxResults=50, + singleEvents=True, + orderBy="startTime", + ) + .execute() + ) + raise RuntimeError(f"Event with ID {event_id} not found. Available events: {valid_events_with_id}") + + # Get existing attendee emails (case-insensitive) + existing_emails = { + attendee.get("email", "").lower() for attendee in event.get("attendees", []) + } + + # Filter out emails that are already attendees + new_attendees = [ + {"email": email} + for email in attendees + if email.lower() not in existing_emails + ] + + if not new_attendees: + existing_attendee_list = [attendee.get("email", "") for attendee in event.get("attendees", [])] + return ( + f"No new attendees were added to event '{event_id}' because all specified emails " + f"are already attendees. Current attendees: {existing_attendee_list}" + ) + + # Add new attendees to the event + event["attendees"] = event.get("attendees", []) + new_attendees + + # Update the event + updated_event = ( + service.events() + .update( + calendarId=calendar_id, + eventId=event_id, + sendUpdates=send_updates, + body=event, + ) + .execute() + ) + + added_emails = [attendee["email"] for attendee in new_attendees] + notification_message = "" + if send_updates == "all": + notification_message = "Notifications were sent to all attendees." + elif send_updates == "externalOnly": + notification_message = "Notifications were sent to external attendees only." + elif send_updates == "none": + notification_message = "No notifications were sent to attendees." + + return ( + f"Successfully added {len(new_attendees)} new attendees to event '{event_id}': {', '.join(added_emails)}. " + f"{notification_message} View updated event at {updated_event['htmlLink']}" + ) + except HttpError as e: + logger.error(f"Google Calendar API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Calendar API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool add_attendees_to_event: {e}") + raise e + +async def delete_event( + event_id: str, + calendar_id: str = "primary", + send_updates: str = "all", +) -> str: + """Delete an event from Google Calendar.""" + logger.info(f"Executing tool: delete_event with event_id: {event_id}") + try: + access_token = get_auth_token() + service = get_calendar_service(access_token) + + service.events().delete( + calendarId=calendar_id, eventId=event_id, sendUpdates=send_updates + ).execute() + + notification_message = "" + if send_updates == "all": + notification_message = "Notifications were sent to all attendees." + elif send_updates == "externalOnly": + notification_message = "Notifications were sent to external attendees only." + elif send_updates == "none": + notification_message = "No notifications were sent to attendees." + + return ( + f"Event with ID '{event_id}' successfully deleted from calendar '{calendar_id}'. " + f"{notification_message}" + ) + except HttpError as e: + logger.error(f"Google Calendar API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Calendar API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool delete_event: {e}") + raise e + +async def get_current_time() -> Dict[str, Any]: + """ + Get the current date and time using the user's Google Calendar timezone setting. + + This tool provides accurate current time information to prevent hallucinations + from LLM pre-training data. Always use this tool before scheduling events or + working with date/time operations. + """ + logger.info(f"Executing tool: get_current_time") + try: + access_token = get_auth_token() + service = get_calendar_service(access_token) + + # Get user's timezone setting from Google Calendar settings - https://developers.google.com/workspace/calendar/api/v3/reference/settings#resource + try: + timezone_setting = service.settings().get(setting='timezone').execute() + timezone = timezone_setting.get('value', 'UTC') + logger.info(f"Retrieved user timezone: {timezone}") + except Exception as e: + logger.error(f"Failed to retrieve user timezone: {e}") + raise RuntimeError(f"Failed to retrieve user timezone from Google Calendar: {e}") + + # Parse timezone + try: + tz = ZoneInfo(timezone) + except Exception as e: + logger.error(f"Invalid timezone {timezone}: {e}") + raise RuntimeError(f"Invalid timezone '{timezone}' received from Google Calendar: {e}") + + # Get current time in user's timezone + now = datetime.now(tz).replace(microsecond=0) + + return { + "datetime": now.strftime("%Y-%m-%dT%H:%M:%S"), + "timezone": timezone, + "date": now.strftime("%Y-%m-%d"), + "time": now.strftime("%H:%M:%S"), + "day_of_week": now.strftime("%A"), + } + except Exception as e: + logger.exception(f"Error executing tool get_current_time: {e}") + raise e + +async def find_free_slots( + items: list[str] | None = None, + time_min: str | None = None, + time_max: str | None = None, + timezone: str = "UTC", + min_slot_duration_minutes: int = 30, +) -> Dict[str, Any]: + """ + Find free and busy time slots for specified calendar users. + + Returns a simple structure with busy and free time slots for each user. + Defaults to the current day if time_min/time_max are omitted. + """ + logger.info(f"Executing tool: find_free_slots for items: {items}") + try: + access_token = get_auth_token() + service = get_calendar_service(access_token) + + # Default to primary calendar if none specified + if not items: + items = ["primary"] + + # Parse timezone + try: + tz = ZoneInfo(timezone) + except Exception: + logger.warning(f"Invalid timezone {timezone}, defaulting to UTC") + tz = ZoneInfo("UTC") + timezone = "UTC" + + # Default to current day in specified timezone if time range not provided + now = datetime.now(tz) + if not time_min: + time_min_dt = now.replace(hour=0, minute=0, second=0, microsecond=0) + else: + time_min_dt = datetime.fromisoformat(time_min.replace('Z', '+00:00')) + if time_min_dt.tzinfo is None: + time_min_dt = time_min_dt.replace(tzinfo=tz) + else: + # Convert to requested timezone + time_min_dt = time_min_dt.astimezone(tz) + + if not time_max: + time_max_dt = now.replace(hour=23, minute=59, second=59, microsecond=999999) + else: + time_max_dt = datetime.fromisoformat(time_max.replace('Z', '+00:00')) + if time_max_dt.tzinfo is None: + time_max_dt = time_max_dt.replace(tzinfo=tz) + else: + # Convert to requested timezone + time_max_dt = time_max_dt.astimezone(tz) + + # Validate time range + if time_min_dt >= time_max_dt: + raise ValueError(f"time_min must precede time_max") + + # Prepare freebusy query + body = { + "timeMin": time_min_dt.isoformat(), + "timeMax": time_max_dt.isoformat(), + "timeZone": timezone, + "items": [{"id": item} for item in items], + } + + # Query freebusy information + freebusy_result = service.freebusy().query(body=body).execute() + + # Process results for each calendar - create simple structure + calendars = {} + + for item in items: + calendar_data = freebusy_result.get("calendars", {}).get(item, {}) + + # Check for errors + if "errors" in calendar_data: + calendars[item] = { + "error": calendar_data["errors"][0].get("reason", "Unknown error"), + "busy": [], + "free": [], + } + continue + + busy_periods = calendar_data.get("busy", []) + + # Convert busy periods to simple format with timezone + busy_slots = [] + for busy in busy_periods: + busy_start = datetime.fromisoformat(busy["start"].replace('Z', '+00:00')).astimezone(tz) + busy_end = datetime.fromisoformat(busy["end"].replace('Z', '+00:00')).astimezone(tz) + busy_slots.append({ + "start": busy_start.isoformat(), + "end": busy_end.isoformat(), + }) + + # Calculate free slots (gaps between busy periods) + free_slots = [] + + # Sort busy periods by start time + sorted_busy = sorted( + [(datetime.fromisoformat(b["start"].replace('Z', '+00:00')).astimezone(tz), + datetime.fromisoformat(b["end"].replace('Z', '+00:00')).astimezone(tz)) + for b in busy_periods], + key=lambda x: x[0] + ) + + # Check for free slot at the beginning + if not sorted_busy or sorted_busy[0][0] > time_min_dt: + gap_end = sorted_busy[0][0] if sorted_busy else time_max_dt + duration_minutes = int((gap_end - time_min_dt).total_seconds() / 60) + if duration_minutes >= min_slot_duration_minutes: + free_slots.append({ + "start": time_min_dt.isoformat(), + "end": gap_end.isoformat(), + }) + + # Find gaps between busy periods + for i in range(len(sorted_busy) - 1): + gap_start = sorted_busy[i][1] # End of current busy period + gap_end = sorted_busy[i + 1][0] # Start of next busy period + + if gap_start < gap_end: + duration_minutes = int((gap_end - gap_start).total_seconds() / 60) + if duration_minutes >= min_slot_duration_minutes: + free_slots.append({ + "start": gap_start.isoformat(), + "end": gap_end.isoformat(), + }) + + # Check for free slot at the end + if sorted_busy and sorted_busy[-1][1] < time_max_dt: + gap_start = sorted_busy[-1][1] + duration_minutes = int((time_max_dt - gap_start).total_seconds() / 60) + if duration_minutes >= min_slot_duration_minutes: + free_slots.append({ + "start": gap_start.isoformat(), + "end": time_max_dt.isoformat(), + }) + + # If no busy periods, the entire time range is free + if not sorted_busy: + duration_minutes = int((time_max_dt - time_min_dt).total_seconds() / 60) + if duration_minutes >= min_slot_duration_minutes: + free_slots.append({ + "start": time_min_dt.isoformat(), + "end": time_max_dt.isoformat(), + }) + + calendars[item] = { + "busy": busy_slots, + "free": free_slots, + } + + return { + "calendars": calendars, + } + + except HttpError as e: + logger.error(f"Google Calendar API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Calendar API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool find_free_slots: {e}") + raise e + +def _warmup_contact_search(access_token: str, contact_type: str): + """ + Send warmup request with empty query to update the cache. + + According to Google's documentation, searchContacts and otherContacts.search + require a warmup request before actual searches for better performance. + See: https://developers.google.com/people/v1/contacts#search_the_users_contacts + and https://developers.google.com/people/v1/other-contacts#search_the_users_other_contacts + + Note: Creates its own service instance to avoid thread safety issues with httplib2. + """ + try: + # Create a separate service instance for this thread + service = get_people_service(access_token) + + if contact_type == 'personal': + # Warmup for people.searchContacts + service.people().searchContacts( + query="", + pageSize=1, + readMask='names' + ).execute() + logger.info("Warmup request sent for personal contacts") + elif contact_type == 'other': + # Warmup for otherContacts.search + service.otherContacts().search( + query="", + pageSize=1, + readMask='names' + ).execute() + logger.info("Warmup request sent for other contacts") + except Exception as e: + # Don't fail if warmup fails, just log it + logger.warning(f"Warmup request failed for {contact_type} contacts: {e}") + +async def search_contacts( + query: str, + contact_type: str = "all", + page_size: int = 10, + page_token: str | None = None, + directory_sources: str = "UNSPECIFIED", +) -> Dict[str, Any]: + """ + Search for contacts by name or email address. + + Supports searching personal contacts, other contact sources, domain directory, + or all sources simultaneously. When contact_type is 'all' (default), returns + three separate result sets (personal, other, directory) each with independent + pagination tokens. + """ + logger.info(f"Executing tool: search_contacts with query: {query}, contact_type: {contact_type}") + try: + access_token = get_auth_token() + service = get_people_service(access_token) + + # Define the read mask for calendar-relevant person fields + # Only includes fields necessary for calendar operations (creating events, adding attendees) + comprehensive_read_mask = 'names,emailAddresses,organizations,phoneNumbers,metadata' + + # Limited read mask for other contacts + limited_read_mask = 'emailAddresses,metadata,names,phoneNumbers' + + def format_contact(person: Dict[str, Any], contact_type_label: str) -> Dict[str, Any]: + """Helper function to format a person object into structured contact data.""" + names = person.get('names', []) + emails = person.get('emailAddresses', []) + phones = person.get('phoneNumbers', []) + orgs = person.get('organizations', []) + + return { + 'resourceName': person.get('resourceName', ''), + 'displayName': names[0].get('displayName', 'Unknown') if names else 'Unknown', + 'firstName': names[0].get('givenName', '') if names else '', + 'lastName': names[0].get('familyName', '') if names else '', + 'contactType': contact_type_label, + 'emailAddresses': [ + { + 'email': email.get('value', ''), + 'type': email.get('type', 'other').lower(), + } + for email in emails + ], + 'phoneNumbers': [ + { + 'number': phone.get('value', ''), + 'type': phone.get('type', 'other').lower(), + } + for phone in phones + ], + 'organizations': [ + { + 'name': org.get('name', ''), + 'title': org.get('title', ''), + } + for org in orgs + ], + } + + if contact_type == 'all': + # Execute all three searches in parallel (with warmup for personal and other) + import asyncio + + # Use ThreadPoolExecutor for blocking Google API calls + from concurrent.futures import ThreadPoolExecutor + + def search_personal(): + # Create separate service instance for thread safety + personal_service = get_people_service(access_token) + return personal_service.people().searchContacts( + query=query, + pageSize=min(page_size, 30), + readMask=comprehensive_read_mask, + ).execute() + + def search_other(): + # Create separate service instance for thread safety + other_service = get_people_service(access_token) + return other_service.otherContacts().search( + query=query, + pageSize=min(page_size, 30), + readMask=limited_read_mask, + ).execute() + + def search_directory(): + # Create separate service instance for thread safety + directory_service = get_people_service(access_token) + return directory_service.people().searchDirectoryPeople( + query=query, + pageSize=min(page_size, 500), + readMask=comprehensive_read_mask, + sources=['DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE', 'DIRECTORY_SOURCE_TYPE_DOMAIN_CONTACT'], + ).execute() + + # Run warmup requests first, then all three searches in parallel + loop = asyncio.get_event_loop() + with ThreadPoolExecutor(max_workers=5) as executor: + # Send warmup requests for personal and other contacts + warmup_personal_future = loop.run_in_executor( + executor, _warmup_contact_search, access_token, 'personal' + ) + warmup_other_future = loop.run_in_executor( + executor, _warmup_contact_search, access_token, 'other' + ) + + # Wait for warmup to complete + await asyncio.gather(warmup_personal_future, warmup_other_future) + + # Now execute actual searches in parallel + personal_future = loop.run_in_executor(executor, search_personal) + other_future = loop.run_in_executor(executor, search_other) + directory_future = loop.run_in_executor(executor, search_directory) + + personal_res, other_res, directory_res = await asyncio.gather( + personal_future, other_future, directory_future + ) + + # Process personal results + personal_results = [ + format_contact(result.get('person', {}), 'personal') + for result in personal_res.get('results', []) + ] + + # Process other results + other_results = [ + format_contact(result.get('person', {}), 'other') + for result in other_res.get('results', []) + ] + + # Process directory results + directory_results = [ + format_contact(person, 'directory') + for person in directory_res.get('people', []) + ] + + # Return three independent result sets with pagination info + return { + 'message': f'Found contacts matching "{query}" from all sources', + 'query': query, + 'contactType': 'all', + 'personal': { + 'resultCount': len(personal_results), + 'nextPageToken': personal_res.get('nextPageToken'), + 'contacts': personal_results, + }, + 'other': { + 'resultCount': len(other_results), + 'nextPageToken': other_res.get('nextPageToken'), + 'contacts': other_results, + }, + 'directory': { + 'resultCount': len(directory_results), + 'nextPageToken': directory_res.get('nextPageToken'), + 'contacts': directory_results, + }, + } + + elif contact_type == 'personal': + # Send warmup request before actual search + import asyncio + from concurrent.futures import ThreadPoolExecutor + + loop = asyncio.get_event_loop() + with ThreadPoolExecutor(max_workers=1) as executor: + await loop.run_in_executor(executor, _warmup_contact_search, access_token, 'personal') + + response = service.people().searchContacts( + query=query, + pageSize=min(page_size, 30), + readMask=comprehensive_read_mask, + ).execute() + + results = [ + format_contact(result.get('person', {}), 'personal') + for result in response.get('results', []) + ] + + return { + 'message': f'Found {len(results)} personal contact(s) matching "{query}"', + 'query': query, + 'contactType': contact_type, + 'resultCount': len(results), + 'nextPageToken': response.get('nextPageToken'), + 'contacts': results, + } + + elif contact_type == 'other': + # Send warmup request before actual search + import asyncio + from concurrent.futures import ThreadPoolExecutor + + loop = asyncio.get_event_loop() + with ThreadPoolExecutor(max_workers=1) as executor: + await loop.run_in_executor(executor, _warmup_contact_search, access_token, 'other') + + response = service.otherContacts().search( + query=query, + pageSize=min(page_size, 30), + readMask=limited_read_mask, + ).execute() + + results = [ + format_contact(result.get('person', {}), 'other') + for result in response.get('results', []) + ] + + return { + 'message': f'Found {len(results)} other contact(s) matching "{query}"', + 'query': query, + 'contactType': contact_type, + 'resultCount': len(results), + 'nextPageToken': response.get('nextPageToken'), + 'contacts': results, + } + + elif contact_type == 'directory': + # Map directory sources + source_map = { + 'UNSPECIFIED': ['DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE', 'DIRECTORY_SOURCE_TYPE_DOMAIN_CONTACT'], + 'DOMAIN_DIRECTORY': ['DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE'], + 'DOMAIN_CONTACTS': ['DIRECTORY_SOURCE_TYPE_DOMAIN_CONTACT'], + } + sources = source_map.get(directory_sources, source_map['UNSPECIFIED']) + + response = service.people().searchDirectoryPeople( + query=query, + pageSize=min(page_size, 500), + readMask=comprehensive_read_mask, + sources=sources, + pageToken=page_token, + ).execute() + + results = [ + format_contact(person, 'directory') + for person in response.get('people', []) + ] + + return { + 'message': f'Found {len(results)} directory contact(s) matching "{query}"', + 'query': query, + 'contactType': contact_type, + 'resultCount': len(results), + 'nextPageToken': response.get('nextPageToken'), + 'contacts': results, + } + + else: + raise ValueError(f"Invalid contact_type: {contact_type}. Must be one of: all, personal, other, directory") + + except HttpError as e: + logger.error(f"Google People API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google People API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool search_contacts: {e}") + raise e + +@click.command() +@click.option("--port", default=GOOGLE_CALENDAR_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("google-calendar-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="google_calendar_get_current_time", + description="Get the accurate current date and time in the user's timezone. CRITICAL: Always call this tool FIRST before any calendar operations (creating, updating, listing, or scheduling events) to prevent using outdated time information. NOTE: If current time information is already provided in the system prompt or context, you do NOT need to call this tool", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_CALENDAR_CONTEXT", "readOnlyHint": True} + ), + ), + types.Tool( + name="google_calendar_list_calendars", + description="List all calendars accessible by the user.", + inputSchema={ + "type": "object", + "properties": { + "max_results": { + "type": "integer", + "description": "The maximum number of calendars to return. Up to 250 calendars, defaults to 10.", + "default": 10, + "minimum": 1, + "maximum": 250, + }, + "show_deleted": { + "type": "boolean", + "description": "Whether to show deleted calendars. Defaults to False", + "default": False, + }, + "show_hidden": { + "type": "boolean", + "description": "Whether to show hidden calendars. Defaults to False", + "default": False, + }, + "next_page_token": { + "type": "string", + "description": "The token to retrieve the next page of calendars. Optional.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_CALENDAR_CALENDAR", "readOnlyHint": True} + ), + ), + types.Tool( + name="google_calendar_create_event", + description="Create a new event/meeting/sync/meetup in the specified calendar.", + inputSchema={ + "type": "object", + "required": ["summary", "start_datetime", "end_datetime"], + "properties": { + "summary": { + "type": "string", + "description": "The title of the event", + }, + "start_datetime": { + "type": "string", + "description": "The datetime when the event starts in ISO 8601 format, e.g., '2024-12-31T15:30:00' or '2024-12-31T15:30:00-07:00' with timezone.", + }, + "end_datetime": { + "type": "string", + "description": "The datetime when the event ends in ISO 8601 format, e.g., '2024-12-31T17:30:00' or '2024-12-31T17:30:00-07:00' with timezone.", + }, + "calendar_id": { + "type": "string", + "description": "The ID of the calendar to create the event in, usually 'primary'.", + "default": "primary", + }, + "description": { + "type": "string", + "description": "The description of the event", + }, + "location": { + "type": "string", + "description": "The location of the event", + }, + "visibility": { + "type": "string", + "description": "The visibility of the event", + "enum": ["default", "public", "private"], + "default": "default", + }, + "attendees": { + "type": "array", + "items": {"type": "string"}, + "description": "The list of attendee emails. Must be valid email addresses e.g., username@domain.com. You can use google_contact_search_contact tool to find contact emails. YOU MUST NOT assume attendees' email addresses unless it is explicitly provided.", + }, + "send_updates": { + "type": "string", + "description": "Should attendees be notified of the update?", + "enum": ["all", "externalOnly", "none"], + "default": "all", + }, + "add_google_meet": { + "type": "boolean", + "description": "Whether to add a Google Meet conference to the event.", + "default": False, + }, + "recurrence": { + "type": "array", + "items": {"type": "string"}, + "description": "List of RRULE, EXRULE, RDATE and EXDATE lines for a recurring event, as specified in RFC5545. Examples: ['RRULE:FREQ=DAILY;COUNT=5'] for 5 days, ['RRULE:FREQ=WEEKLY;BYDAY=MO,WE,FR;COUNT=10'] for 10 occurrences on Mon/Wed/Fri, ['RRULE:FREQ=MONTHLY;BYDAY=2TH'] for 2nd Thursday each month. Common frequencies: DAILY, WEEKLY, MONTHLY, YEARLY. Use COUNT for number of occurrences or UNTIL for end date (format: YYYYMMDDTHHMMSSZ).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_CALENDAR_EVENT"} + ), + ), + types.Tool( + name="google_calendar_list_events", + description="List events from the specified calendar within the given datetime range.", + inputSchema={ + "type": "object", + "required": ["min_end_datetime", "max_start_datetime"], + "properties": { + "min_end_datetime": { + "type": "string", + "description": "Filter by events that end on or after this datetime in ISO 8601 format, e.g., '2024-09-15T09:00:00' or '2024-09-15T09:00:00-07:00' with timezone.", + }, + "max_start_datetime": { + "type": "string", + "description": "Filter by events that start before this datetime in ISO 8601 format, e.g., '2024-09-16T17:00:00' or '2024-09-16T17:00:00-07:00' with timezone.", + }, + "calendar_id": { + "type": "string", + "description": "The ID of the calendar to list events from", + "default": "primary", + }, + "max_results": { + "type": "integer", + "description": "The maximum number of events to return", + "default": 10, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_CALENDAR_EVENT", "readOnlyHint": True} + ), + ), + types.Tool( + name="google_calendar_update_event", + description="Update an existing event in the specified calendar with the provided details.", + inputSchema={ + "type": "object", + "required": ["event_id"], + "properties": { + "event_id": { + "type": "string", + "description": "The ID of the event to update", + }, + "updated_start_datetime": { + "type": "string", + "description": "The updated datetime that the event starts in ISO 8601 format, e.g., '2024-12-31T15:30:00' or '2024-12-31T15:30:00-07:00' with timezone.", + }, + "updated_end_datetime": { + "type": "string", + "description": "The updated datetime that the event ends in ISO 8601 format, e.g., '2024-12-31T17:30:00' or '2024-12-31T17:30:00-07:00' with timezone.", + }, + "updated_summary": { + "type": "string", + "description": "The updated title of the event", + }, + "updated_description": { + "type": "string", + "description": "The updated description of the event", + }, + "updated_location": { + "type": "string", + "description": "The updated location of the event", + }, + "updated_visibility": { + "type": "string", + "description": "The visibility of the event", + "enum": ["default", "public", "private"], + }, + "attendees_to_add": { + "type": "array", + "items": {"type": "string"}, + "description": "The list of attendee emails to add. Must be valid email addresses e.g., username@domain.com. You can use google_contact_search_contact tool to find contact emails. YOU MUST NOT assume attendees' email addresses unless it is explicitly provided.", + }, + "attendees_to_remove": { + "type": "array", + "items": {"type": "string"}, + "description": "The list of attendee emails to remove. Must be valid email addresses e.g., username@domain.com. You can use google_contact_search_contact tool to find contact emails. YOU MUST NOT assume attendees' email addresses unless it is explicitly provided.", + }, + "updated_recurrence": { + "type": "array", + "items": {"type": "string"}, + "description": "Updated recurrence rules in RRULE format (RFC5545). To convert a recurring event to a single event, pass an empty array []. To add/update recurrence, provide rules like: ['RRULE:FREQ=DAILY;COUNT=5'] for 5 days, ['RRULE:FREQ=WEEKLY;BYDAY=MO,WE,FR;COUNT=10'] for 10 occurrences on Mon/Wed/Fri, ['RRULE:FREQ=MONTHLY;BYDAY=2TH'] for 2nd Thursday each month. Common frequencies: DAILY, WEEKLY, MONTHLY, YEARLY. Use COUNT for number of occurrences or UNTIL for end date (format: YYYYMMDDTHHMMSSZ).", + }, + "send_updates": { + "type": "string", + "description": "Should attendees be notified of the update?", + "enum": ["all", "externalOnly", "none"], + "default": "all", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_CALENDAR_EVENT"} + ), + ), + types.Tool( + name="google_calendar_delete_event", + description="Delete an event from Google Calendar.", + inputSchema={ + "type": "object", + "required": ["event_id"], + "properties": { + "event_id": { + "type": "string", + "description": "The ID of the event to delete", + }, + "calendar_id": { + "type": "string", + "description": "The ID of the calendar containing the event", + "default": "primary", + }, + "send_updates": { + "type": "string", + "description": "Specifies which attendees to notify about the deletion", + "enum": ["all", "externalOnly", "none"], + "default": "all", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_CALENDAR_EVENT"} + ), + ), + types.Tool( + name="google_calendar_add_attendees_to_event", + description="Add attendees to an existing event in Google Calendar.", + inputSchema={ + "type": "object", + "required": ["event_id", "attendees"], + "properties": { + "event_id": { + "type": "string", + "description": "The ID of the event to add attendees to", + }, + "attendees": { + "type": "array", + "items": {"type": "string"}, + "description": "The list of attendee emails to add. Must be valid email addresses e.g., username@domain.com. You can use google_contact_search_contact tool to find contact emails. YOU MUST NOT assume attendees' email addresses unless it is explicitly provided.", + }, + "calendar_id": { + "type": "string", + "description": "The ID of the calendar containing the event", + "default": "primary", + }, + "send_updates": { + "type": "string", + "description": "Specifies which attendees to notify about the addition", + "enum": ["all", "externalOnly", "none"], + "default": "all", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_CALENDAR_EVENT"} + ), + ), + types.Tool( + name="google_calendar_find_free_slots", + description="Find both free and busy time slots in Google Calendars for specified calendars within a defined time range (defaults to the current day UTC if time_min/time_max are omitted). Returns busy intervals and calculated free slots by finding gaps between busy periods; time_min must precede time_max if both are provided. This action retrieves free and busy time slots for the specified calendars over a given time period. It analyzes the busy intervals from the calendars and provides calculated free slots based on the gaps in the busy periods. All returned times include timezone information and are formatted in the requested timezone for easy interpretation and scheduling.", + inputSchema={ + "type": "object", + "properties": { + "items": { + "type": "array", + "items": {"type": "string"}, + "description": "List of calendar email addresses to check for availability. Use 'primary' for the user's primary calendar, or specify email addresses like 'user@domain.com'. Defaults to ['primary'] if not provided.", + }, + "time_min": { + "type": "string", + "description": "The start of the time range to search in ISO 8601 format (e.g., '2024-12-31T09:00:00' or '2024-12-31T09:00:00-07:00' with timezone). If omitted, defaults to the start of the current day (00:00:00) in the specified timezone.", + }, + "time_max": { + "type": "string", + "description": "The end of the time range to search in ISO 8601 format (e.g., '2024-12-31T17:00:00' or '2024-12-31T17:00:00-07:00' with timezone). If omitted, defaults to the end of the current day (23:59:59) in the specified timezone. Must be after time_min.", + }, + "timezone": { + "type": "string", + "description": "Timezone for the time range and output (e.g., 'America/Los_Angeles', 'Europe/London', 'Asia/Tokyo'). Defaults to 'UTC'. All returned times will be in this timezone.", + "default": "UTC", + }, + "min_slot_duration_minutes": { + "type": "integer", + "description": "Minimum duration in minutes for a time slot to be considered as a valid free slot. Free slots shorter than this duration will be filtered out. Defaults to 30 minutes.", + "default": 30, + "minimum": 1, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_CALENDAR_AVAILABILITY", "readOnlyHint": True} + ), + ), + types.Tool( + name="google_calendar_search_contacts", + description="Search for contacts when you need to know the contact details. Supports searching personal contacts, other contact sources, domain directory, or all sources simultaneously. When contactType is 'all' (default), returns three separate result sets (personal, other, directory) each with independent pagination tokens for flexible paginated access to individual sources.", + inputSchema={ + "type": "object", + "required": ["query"], + "properties": { + "query": { + "type": "string", + "description": "The plain-text search query for contact names, email addresses, phone numbers, etc.", + }, + "contactType": { + "type": "string", + "description": "Type of contacts to search: 'all' (search all types - returns three separate result sets with independent pagination tokens), 'personal' (your saved contacts), 'other' (other contact sources like Gmail suggestions), or 'directory' (domain directory). Defaults to 'all'.", + "enum": ["all", "personal", "other", "directory"], + "default": "all", + }, + "pageSize": { + "type": "integer", + "description": "Number of results to return. For personal/other: max 30, for directory: max 500. Defaults to 10.", + "default": 10, + "minimum": 1, + }, + "pageToken": { + "type": "string", + "description": "Page token for pagination (used with directory searches). Optional.", + }, + "directorySources": { + "type": "string", + "description": "Directory sources to search (only used for directory type): 'UNSPECIFIED' (both domain directory and contacts), 'DOMAIN_DIRECTORY' (domain directory only), or 'DOMAIN_CONTACTS' (domain contacts only). Defaults to 'UNSPECIFIED'.", + "enum": ["UNSPECIFIED", "DOMAIN_DIRECTORY", "DOMAIN_CONTACTS"], + "default": "UNSPECIFIED", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_CALENDAR_EVENT", "readOnlyHint": True} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + if name == "google_calendar_get_current_time": + try: + result = await get_current_time() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_calendar_list_calendars": + try: + max_results = arguments.get("max_results", 10) + show_deleted = arguments.get("show_deleted", False) + show_hidden = arguments.get("show_hidden", False) + next_page_token = arguments.get("next_page_token") + + result = await list_calendars(max_results, show_deleted, show_hidden, next_page_token) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_calendar_create_event": + try: + summary = arguments.get("summary") + start_datetime = arguments.get("start_datetime") + end_datetime = arguments.get("end_datetime") + + if not summary or not start_datetime or not end_datetime: + return [ + types.TextContent( + type="text", + text="Error: summary, start_datetime and end_datetime parameters are required", + ) + ] + + calendar_id = arguments.get("calendar_id", "primary") + description = arguments.get("description") + location = arguments.get("location") + visibility = arguments.get("visibility", "default") + attendees = arguments.get("attendees") + send_updates = arguments.get("send_updates", "all") + add_google_meet = arguments.get("add_google_meet", False) + recurrence = arguments.get("recurrence") + + result = await create_event( + summary, start_datetime, end_datetime, calendar_id, + description, location, visibility, attendees, send_updates, + add_google_meet, recurrence + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_calendar_list_events": + try: + min_end_datetime = arguments.get("min_end_datetime") + max_start_datetime = arguments.get("max_start_datetime") + + if not min_end_datetime or not max_start_datetime: + return [ + types.TextContent( + type="text", + text="Error: min_end_datetime and max_start_datetime parameters are required", + ) + ] + + calendar_id = arguments.get("calendar_id", "primary") + max_results = arguments.get("max_results", 10) + + result = await list_events(min_end_datetime, max_start_datetime, calendar_id, max_results) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_calendar_update_event": + try: + event_id = arguments.get("event_id") + + if not event_id: + return [ + types.TextContent( + type="text", + text="Error: event_id parameter is required", + ) + ] + + updated_start_datetime = arguments.get("updated_start_datetime") + updated_end_datetime = arguments.get("updated_end_datetime") + updated_summary = arguments.get("updated_summary") + updated_description = arguments.get("updated_description") + updated_location = arguments.get("updated_location") + updated_visibility = arguments.get("updated_visibility") + attendees_to_add = arguments.get("attendees_to_add") + attendees_to_remove = arguments.get("attendees_to_remove") + updated_recurrence = arguments.get("updated_recurrence") + send_updates = arguments.get("send_updates", "all") + + result = await update_event( + event_id, updated_start_datetime, updated_end_datetime, + updated_summary, updated_description, updated_location, + updated_visibility, attendees_to_add, attendees_to_remove, + updated_recurrence, send_updates + ) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_calendar_delete_event": + try: + event_id = arguments.get("event_id") + + if not event_id: + return [ + types.TextContent( + type="text", + text="Error: event_id parameter is required", + ) + ] + + calendar_id = arguments.get("calendar_id", "primary") + send_updates = arguments.get("send_updates", "all") + + result = await delete_event(event_id, calendar_id, send_updates) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_calendar_add_attendees_to_event": + try: + event_id = arguments.get("event_id") + attendees = arguments.get("attendees") + + if not event_id: + return [ + types.TextContent( + type="text", + text="Error: event_id parameter is required", + ) + ] + + if not attendees: + return [ + types.TextContent( + type="text", + text="Error: attendees parameter is required", + ) + ] + + calendar_id = arguments.get("calendar_id", "primary") + send_updates = arguments.get("send_updates", "all") + + result = await add_attendees_to_event(event_id, attendees, calendar_id, send_updates) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_calendar_find_free_slots": + try: + items = arguments.get("items") + time_min = arguments.get("time_min") + time_max = arguments.get("time_max") + timezone = arguments.get("timezone", "UTC") + min_slot_duration_minutes = arguments.get("min_slot_duration_minutes", 30) + + result = await find_free_slots( + items=items, + time_min=time_min, + time_max=time_max, + timezone=timezone, + min_slot_duration_minutes=min_slot_duration_minutes + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_calendar_search_contacts": + try: + query = arguments.get("query") + + if not query: + return [ + types.TextContent( + type="text", + text="Error: query parameter is required", + ) + ] + + contact_type = arguments.get("contactType", "all") + page_size = arguments.get("pageSize", 10) + page_token = arguments.get("pageToken") + directory_sources = arguments.get("directorySources", "UNSPECIFIED") + + result = await search_contacts( + query=query, + contact_type=contact_type, + page_size=page_size, + page_token=page_token, + directory_sources=directory_sources + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/google_docs/.env.example b/mcp_servers/google_docs/.env.example new file mode 100644 index 00000000..57cc54ef --- /dev/null +++ b/mcp_servers/google_docs/.env.example @@ -0,0 +1,2 @@ +# Port for the MCP server to listen on +GOOGLE_DOCS_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/google_docs/Dockerfile b/mcp_servers/google_docs/Dockerfile new file mode 100644 index 00000000..69a25e8d --- /dev/null +++ b/mcp_servers/google_docs/Dockerfile @@ -0,0 +1,23 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/google_docs/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the server code +COPY mcp_servers/google_docs/server.py . + +COPY mcp_servers/google_docs/.env.example .env + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/google_docs/README.md b/mcp_servers/google_docs/README.md new file mode 100644 index 00000000..41ee87b1 --- /dev/null +++ b/mcp_servers/google_docs/README.md @@ -0,0 +1,78 @@ +# Google Docs MCP Server + +A Model Context Protocol (MCP) server for Google Docs integration. Create, edit, and manage Google Documents using Google Docs API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Google Docs with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("GOOGLE_DOCS", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/google-docs-mcp-server:latest + + +# Run Google Docs MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/google-docs-mcp-server:latest + + +# Run Google Docs MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_google_access_token_here"}' \ + ghcr.io/klavis-ai/google-docs-mcp-server:latest +``` + +**OAuth Setup:** Google Docs requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Document Management**: Create, read, update Google Documents +- **Content Editing**: Insert, modify, and format document content +- **Collaboration**: Manage sharing and permissions +- **Comments**: Add and manage document comments +- **Revision History**: Access document version history + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/google_docs/requirements.txt b/mcp_servers/google_docs/requirements.txt new file mode 100644 index 00000000..ced06de0 --- /dev/null +++ b/mcp_servers/google_docs/requirements.txt @@ -0,0 +1,12 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +google-auth +google-auth-oauthlib +google-auth-httplib2 +google-api-python-client +click +starlette \ No newline at end of file diff --git a/mcp_servers/google_docs/server.py b/mcp_servers/google_docs/server.py new file mode 100644 index 00000000..fac93d56 --- /dev/null +++ b/mcp_servers/google_docs/server.py @@ -0,0 +1,580 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv +from google.oauth2.credentials import Credentials +from googleapiclient.discovery import build +from googleapiclient.errors import HttpError + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +GOOGLE_DOCS_MCP_SERVER_PORT = int(os.getenv("GOOGLE_DOCS_MCP_SERVER_PORT", "5000")) + +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +def get_docs_service(access_token: str): + """Create Google Docs service with access token.""" + credentials = Credentials(token=access_token) + return build('docs', 'v1', credentials=credentials) + +def get_drive_service(access_token: str): + """Create Google Drive service with access token.""" + credentials = Credentials(token=access_token) + return build('drive', 'v3', credentials=credentials) + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("Authentication token not found in request context") + +async def get_document_by_id(document_id: str) -> Dict[str, Any]: + """Get the latest version of the specified Google Docs document.""" + logger.info(f"Executing tool: get_document_by_id with document_id: {document_id}") + try: + access_token = get_auth_token() + service = get_docs_service(access_token) + + request = service.documents().get(documentId=document_id) + response = request.execute() + + return dict(response) + except HttpError as e: + logger.error(f"Google Docs API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Docs API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_document_by_id: {e}") + raise e + +async def insert_text_at_end(document_id: str, text: str) -> Dict[str, Any]: + """Insert text at the end of a Google Docs document.""" + logger.info(f"Executing tool: insert_text_at_end with document_id: {document_id}") + try: + access_token = get_auth_token() + service = get_docs_service(access_token) + + document = await get_document_by_id(document_id) + + end_index = document["body"]["content"][-1]["endIndex"] + + requests = [ + { + 'insertText': { + 'location': { + 'index': int(end_index) - 1 + }, + 'text': text + } + } + ] + + # Execute the request + response = ( + service.documents() + .batchUpdate(documentId=document_id, body={"requests": requests}) + .execute() + ) + + return dict(response) + except HttpError as e: + logger.error(f"Google Docs API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Docs API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool insert_text_at_end: {e}") + raise e + +async def create_blank_document(title: str) -> Dict[str, Any]: + """Create a new blank Google Docs document with a title.""" + logger.info(f"Executing tool: create_blank_document with title: {title}") + try: + access_token = get_auth_token() + service = get_docs_service(access_token) + + body = {"title": title} + + request = service.documents().create(body=body) + response = request.execute() + + return { + "title": response["title"], + "document_id": response["documentId"], + "document_url": f"https://docs.google.com/document/d/%7Bresponse['documentId']}/edit", + } + except HttpError as e: + logger.error(f"Google Docs API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Docs API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool create_blank_document: {e}") + raise e + +async def create_document_from_text(title: str, text_content: str) -> Dict[str, Any]: + """Create a new Google Docs document with specified text content.""" + logger.info(f"Executing tool: create_document_from_text with title: {title}") + try: + # First, create a blank document + document = await create_blank_document(title) + + access_token = get_auth_token() + service = get_docs_service(access_token) + + # Insert the text content + requests = [ + { + "insertText": { + "location": { + "index": 1, + }, + "text": text_content, + } + } + ] + + # Execute the batchUpdate method to insert text + service.documents().batchUpdate( + documentId=document["document_id"], body={"requests": requests} + ).execute() + + return { + "title": document["title"], + "documentId": document["document_id"], + "documentUrl": f"https://docs.google.com/document/d/%7Bdocument['document_id']}/edit", + } + except HttpError as e: + logger.error(f"Google Docs API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Docs API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool create_document_from_text: {e}") + raise e + +async def get_all_documents() -> Dict[str, Any]: + """Get all Google Docs documents from the user's Drive.""" + logger.info(f"Executing tool: get_all_documents") + try: + access_token = get_auth_token() + service = get_drive_service(access_token) + + # Query for Google Docs files + query = "mimeType='application/vnd.google-apps.document'" + + request = service.files().list( + q=query, + fields="nextPageToken, files(id, name, createdTime, modifiedTime, webViewLink)", + orderBy="modifiedTime desc" + ) + response = request.execute() + + documents = [] + for file in response.get('files', []): + documents.append({ + 'id': file['id'], + 'name': file['name'], + 'createdTime': file.get('createdTime'), + 'modifiedTime': file.get('modifiedTime'), + 'webViewLink': file.get('webViewLink') + }) + + return { + 'documents': documents, + 'total_count': len(documents) + } + except HttpError as e: + logger.error(f"Google Drive API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Drive API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_all_documents: {e}") + raise e + +@click.command() +@click.option("--port", default=GOOGLE_DOCS_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("google-docs-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="google_docs_get_document_by_id", + description="Retrieve a Google Docs document by ID.", + inputSchema={ + "type": "object", + "required": ["document_id"], + "properties": { + "document_id": { + "type": "string", + "description": "The ID of the Google Docs document to retrieve.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_DOCS_DOCUMENT", "readOnlyHint": True} + ), + ), + types.Tool( + name="google_docs_get_all_documents", + description="Get all Google Docs documents from the user's Drive.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_DOCS_DOCUMENT", "readOnlyHint": True} + ), + ), + types.Tool( + name="google_docs_insert_text_at_end", + description="Insert text at the end of a Google Docs document.", + inputSchema={ + "type": "object", + "required": ["document_id", "text"], + "properties": { + "document_id": { + "type": "string", + "description": "The ID of the Google Docs document to modify.", + }, + "text": { + "type": "string", + "description": "The text content to insert at the end of the document.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_DOCS_DOCUMENT"} + ), + ), + types.Tool( + name="google_docs_create_blank_document", + description="Create a new blank Google Docs document with a title.", + inputSchema={ + "type": "object", + "required": ["title"], + "properties": { + "title": { + "type": "string", + "description": "The title for the new document.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_DOCS_DOCUMENT"} + ), + ), + types.Tool( + name="google_docs_create_document_from_text", + description="Create a new Google Docs document with specified text content.", + inputSchema={ + "type": "object", + "required": ["title", "text_content"], + "properties": { + "title": { + "type": "string", + "description": "The title for the new document.", + }, + "text_content": { + "type": "string", + "description": "The text content to include in the new document.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_DOCS_DOCUMENT"} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + if name == "google_docs_get_document_by_id": + document_id = arguments.get("document_id") + if not document_id: + return [ + types.TextContent( + type="text", + text="Error: document_id parameter is required", + ) + ] + + try: + result = await get_document_by_id(document_id) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_docs_get_all_documents": + try: + result = await get_all_documents() + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_docs_insert_text_at_end": + document_id = arguments.get("document_id") + text = arguments.get("text") + if not document_id or not text: + return [ + types.TextContent( + type="text", + text="Error: document_id and text parameters are required", + ) + ] + + try: + result = await insert_text_at_end(document_id, text) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_docs_create_blank_document": + title = arguments.get("title") + if not title: + return [ + types.TextContent( + type="text", + text="Error: title parameter is required", + ) + ] + + try: + result = await create_blank_document(title) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_docs_create_document_from_text": + title = arguments.get("title") + text_content = arguments.get("text_content") + if not title or not text_content: + return [ + types.TextContent( + type="text", + text="Error: title and text_content parameters are required", + ) + ] + + try: + result = await create_document_from_text(title, text_content) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/google_drive/.env.example b/mcp_servers/google_drive/.env.example new file mode 100644 index 00000000..80f3a984 --- /dev/null +++ b/mcp_servers/google_drive/.env.example @@ -0,0 +1,2 @@ +# Port for the MCP server to listen on +GOOGLE_DRIVE_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/google_drive/Dockerfile b/mcp_servers/google_drive/Dockerfile new file mode 100644 index 00000000..22b1606a --- /dev/null +++ b/mcp_servers/google_drive/Dockerfile @@ -0,0 +1,23 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/google_drive/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/google_drive/server.py . +COPY mcp_servers/google_drive/utils.py . + +COPY mcp_servers/google_drive/.env.example .env + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/google_drive/README.md b/mcp_servers/google_drive/README.md new file mode 100644 index 00000000..9e0a6a9d --- /dev/null +++ b/mcp_servers/google_drive/README.md @@ -0,0 +1,78 @@ +# Google Drive MCP Server + +A Model Context Protocol (MCP) server for Google Drive integration. Manage files, folders, and sharing using Google Drive API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Google Drive with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("GOOGLE_DRIVE", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/google-drive-mcp-server:latest + + +# Run Google Drive MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/google-drive-mcp-server:latest + + +# Run Google Drive MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_google_access_token_here"}' \ + ghcr.io/klavis-ai/google-drive-mcp-server:latest +``` + +**OAuth Setup:** Google Drive requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **File Management**: Upload, download, and manage Drive files +- **Folder Operations**: Create and organize folders and directory structure +- **Sharing**: Manage file permissions and sharing settings +- **Search**: Search files and folders by name, content, and metadata +- **Collaboration**: Handle real-time collaboration and comments + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/google_drive/requirements.txt b/mcp_servers/google_drive/requirements.txt new file mode 100644 index 00000000..ced06de0 --- /dev/null +++ b/mcp_servers/google_drive/requirements.txt @@ -0,0 +1,12 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +google-auth +google-auth-oauthlib +google-auth-httplib2 +google-api-python-client +click +starlette \ No newline at end of file diff --git a/mcp_servers/google_drive/server.py b/mcp_servers/google_drive/server.py new file mode 100644 index 00000000..6acce560 --- /dev/null +++ b/mcp_servers/google_drive/server.py @@ -0,0 +1,981 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar +from enum import Enum + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv +from google.oauth2.credentials import Credentials +from googleapiclient.discovery import build +from googleapiclient.errors import HttpError + +from utils import convert_document_to_html, convert_document_to_markdown + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +GOOGLE_DRIVE_MCP_SERVER_PORT = int(os.getenv("GOOGLE_DRIVE_MCP_SERVER_PORT", "5000")) + +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +# Define enums that are referenced in context.py +class OrderBy(Enum): + MODIFIED_TIME_DESC = "modifiedTime desc" + MODIFIED_TIME = "modifiedTime" + CREATED_TIME_DESC = "createdTime desc" + CREATED_TIME = "createdTime" + NAME = "name" + NAME_DESC = "name desc" + +class DocumentFormat(Enum): + MARKDOWN = "markdown" + HTML = "html" + +class Corpora(Enum): + USER = "user" + DRIVE = "drive" + DOMAIN = "domain" + +def get_drive_service(access_token: str): + """Create Google Drive service with access token.""" + credentials = Credentials(token=access_token) + return build('drive', 'v3', credentials=credentials) + +def get_docs_service(access_token: str): + """Create Google Docs service with access token.""" + credentials = Credentials(token=access_token) + return build('docs', 'v1', credentials=credentials) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("Authentication token not found in request context") + +def remove_none_values(params: dict) -> dict: + """Remove None values from parameters dictionary.""" + return {k: v for k, v in params.items() if v is not None} + +async def get_document_content_by_id(document_id: str) -> Dict[str, Any]: + """Get the latest version of the specified Google Docs document.""" + logger.info(f"Executing tool: get_document_by_id with document_id: {document_id}") + try: + access_token = get_auth_token() + service = get_docs_service(access_token) + + request = service.documents().get(documentId=document_id) + response = request.execute() + + return dict(response) + except HttpError as e: + logger.error(f"Google Docs API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Docs API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_document_by_id: {e}") + raise e + +def build_files_list_query( + mime_type: str, + document_contains: list[str] | None = None, + document_not_contains: list[str] | None = None, +) -> str: + query = [f"(mimeType = '{mime_type}' and trashed = false)"] + + if isinstance(document_contains, str): + document_contains = [document_contains] + + if isinstance(document_not_contains, str): + document_not_contains = [document_not_contains] + + if document_contains: + for keyword in document_contains: + name_contains = keyword.replace("'", "\\'") + full_text_contains = keyword.replace("'", "\\'") + keyword_query = ( + f"(name contains '{name_contains}' or fullText contains '{full_text_contains}')" + ) + query.append(keyword_query) + + if document_not_contains: + for keyword in document_not_contains: + name_not_contains = keyword.replace("'", "\\'") + full_text_not_contains = keyword.replace("'", "\\'") + keyword_query = ( + f"(name not contains '{name_not_contains}' and " + f"fullText not contains '{full_text_not_contains}')" + ) + query.append(keyword_query) + + return " and ".join(query) + +def build_files_list_params( + mime_type: str, + page_size: int, + order_by: list[OrderBy], + pagination_token: str | None, + include_shared_drives: bool, + search_only_in_shared_drive_id: str | None, + include_organization_domain_documents: bool, + document_contains: list[str] | None = None, + document_not_contains: list[str] | None = None, +) -> dict[str, Any]: + query = build_files_list_query( + mime_type=mime_type, + document_contains=document_contains, + document_not_contains=document_not_contains, + ) + + params = { + "q": query, + "pageSize": page_size, + "orderBy": ",".join([item.value for item in order_by]), + "pageToken": pagination_token, + } + + if ( + include_shared_drives + or search_only_in_shared_drive_id + or include_organization_domain_documents + ): + params["includeItemsFromAllDrives"] = "true" + params["supportsAllDrives"] = "true" + + if search_only_in_shared_drive_id: + params["driveId"] = search_only_in_shared_drive_id + params["corpora"] = Corpora.DRIVE.value + + if include_organization_domain_documents: + params["corpora"] = Corpora.DOMAIN.value + + params = remove_none_values(params) + + return params + +def build_file_tree_request_params( + order_by: list[OrderBy] | None, + page_token: str | None, + limit: int | None, + include_shared_drives: bool, + restrict_to_shared_drive_id: str | None, + include_organization_domain_documents: bool, +) -> dict[str, Any]: + if order_by is None: + order_by = [OrderBy.MODIFIED_TIME_DESC] + elif isinstance(order_by, OrderBy): + order_by = [order_by] + + params = { + "q": "trashed = false", + "corpora": Corpora.USER.value, + "pageToken": page_token, + "fields": ( + "files(id, name, parents, mimeType, driveId, size, createdTime, modifiedTime, owners)" + ), + "orderBy": ",".join([item.value for item in order_by]), + } + + if limit: + params["pageSize"] = str(limit) + + if ( + include_shared_drives + or restrict_to_shared_drive_id + or include_organization_domain_documents + ): + params["includeItemsFromAllDrives"] = "true" + params["supportsAllDrives"] = "true" + + if restrict_to_shared_drive_id: + params["driveId"] = restrict_to_shared_drive_id + params["corpora"] = Corpora.DRIVE.value + + if include_organization_domain_documents: + params["corpora"] = Corpora.DOMAIN.value + + return params + +def build_file_tree(files: dict[str, Any]) -> dict[str, Any]: + file_tree: dict[str, Any] = {} + + for file in files.values(): + owners = file.get("owners", []) + if owners: + owners = [ + {"name": owner.get("displayName", ""), "email": owner.get("emailAddress", "")} + for owner in owners + ] + file["owners"] = owners + + if "size" in file: + file["size"] = {"value": int(file["size"]), "unit": "bytes"} + + # Although "parents" is a list, a file can only have one parent + try: + parent_id = file["parents"][0] + del file["parents"] + except (KeyError, IndexError): + parent_id = None + + # Determine the file's Drive ID + if "driveId" in file: + drive_id = file["driveId"] + del file["driveId"] + # If a shared drive id is not present, the file is in "My Drive" + else: + drive_id = "My Drive" + + if drive_id not in file_tree: + file_tree[drive_id] = [] + + # Root files will have the Drive's id as the parent. If the parent id is not in the files + # list, the file must be at drive's root + if parent_id not in files: + file_tree[drive_id].append(file) + + # Associate the file with its parent + else: + if "children" not in files[parent_id]: + files[parent_id]["children"] = [] + files[parent_id]["children"].append(file) + + return file_tree + +async def search_documents( + document_contains: list[str] | None = None, + document_not_contains: list[str] | None = None, + search_only_in_shared_drive_id: str | None = None, + include_shared_drives: bool = False, + include_organization_domain_documents: bool = False, + order_by: list[str] | None = None, + limit: int = 50, + pagination_token: str | None = None, +) -> Dict[str, Any]: + """Search for documents in the user's Google Drive.""" + logger.info(f"Executing tool: search_documents") + try: + access_token = get_auth_token() + service = get_drive_service(access_token) + + # Convert order_by strings to OrderBy enums + order_by_enums = [] + if order_by: + for order in order_by: + try: + order_by_enums.append(OrderBy(order)) + except ValueError: + order_by_enums.append(OrderBy.MODIFIED_TIME_DESC) + else: + order_by_enums = [OrderBy.MODIFIED_TIME_DESC] + + page_size = min(10, limit) + files: list[dict[str, Any]] = [] + + params = build_files_list_params( + mime_type="application/vnd.google-apps.document", + document_contains=document_contains, + document_not_contains=document_not_contains, + page_size=page_size, + order_by=order_by_enums, + pagination_token=pagination_token, + include_shared_drives=include_shared_drives, + search_only_in_shared_drive_id=search_only_in_shared_drive_id, + include_organization_domain_documents=include_organization_domain_documents, + ) + + while len(files) < limit: + if pagination_token: + params["pageToken"] = pagination_token + else: + params.pop("pageToken", None) + + results = service.files().list(**params).execute() + batch = results.get("files", []) + files.extend(batch[: limit - len(files)]) + + pagination_token = results.get("nextPageToken") + if not pagination_token or len(batch) < page_size: + break + + return {"documents_count": len(files), "documents": files} + except HttpError as e: + logger.error(f"Google Drive API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Drive API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool search_documents: {e}") + raise e + +async def search_and_retrieve_documents( + return_format: str = "markdown", + document_contains: list[str] | None = None, + document_not_contains: list[str] | None = None, + search_only_in_shared_drive_id: str | None = None, + include_shared_drives: bool = False, + include_organization_domain_documents: bool = False, + order_by: list[str] | None = None, + limit: int = 50, + pagination_token: str | None = None, +) -> Dict[str, Any]: + """Search and retrieve the contents of Google documents in the user's Google Drive.""" + logger.info(f"Executing tool: search_and_retrieve_documents") + try: + # First search for documents + response = await search_documents( + document_contains=document_contains, + document_not_contains=document_not_contains, + search_only_in_shared_drive_id=search_only_in_shared_drive_id, + include_shared_drives=include_shared_drives, + include_organization_domain_documents=include_organization_domain_documents, + order_by=order_by, + limit=limit, + pagination_token=pagination_token, + ) + + documents = [] + for item in response["documents"]: + document = await get_document_content_by_id(item["id"]) + + # Convert document content to requested format + if return_format == DocumentFormat.MARKDOWN.value: + document_body = convert_document_to_markdown(document) + elif return_format == DocumentFormat.HTML.value: + document_body = convert_document_to_html(document) + else: + # Default to markdown if format is not recognized + document_body = convert_document_to_markdown(document) + + # Extract only the useful fields. Otherwise prompt will be too long. + filtered_document = { + "title": document.get("title", ""), + "body": document_body, + "documentId": document.get("documentId", item["id"]) + } + + documents.append(filtered_document) + + return {"documents_count": len(documents), "documents": documents} + except Exception as e: + logger.exception(f"Error executing tool search_and_retrieve_documents: {e}") + raise e + +async def empty_trash( + drive_id: str | None = None, +) -> Dict[str, Any]: + """Permanently delete all of the user's trashed files.""" + logger.info(f"Executing tool: empty_trash with drive_id: {drive_id}") + try: + access_token = get_auth_token() + + # Use v2 API for empty trash operation + credentials = Credentials(token=access_token) + service = build('drive', 'v2', credentials=credentials) + + params = {} + if drive_id: + params['driveId'] = drive_id + + service.files().emptyTrash(**params).execute() + + return {"success": True, "message": "Trash emptied successfully"} + except HttpError as e: + logger.error(f"Google Drive API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Drive API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool empty_trash: {e}") + raise e + +async def create_shared_drive( + name: str, + request_id: str, +) -> Dict[str, Any]: + """Create a new shared drive.""" + logger.info(f"Executing tool: create_shared_drive with name: {name}, request_id: {request_id}") + try: + access_token = get_auth_token() + service = get_drive_service(access_token) + + drive_metadata = { + 'name': name + } + + result = service.drives().create( + body=drive_metadata, + requestId=request_id + ).execute() + + return result + except HttpError as e: + logger.error(f"Google Drive API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Drive API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool create_shared_drive: {e}") + raise e + +async def get_file_tree_structure( + include_shared_drives: bool = False, + restrict_to_shared_drive_id: str | None = None, + include_organization_domain_documents: bool = False, + order_by: list[str] | None = None, + limit: int | None = None, +) -> Dict[str, Any]: + """Get the file/folder tree structure of the user's Google Drive.""" + logger.info(f"Executing tool: get_file_tree_structure") + try: + access_token = get_auth_token() + service = get_drive_service(access_token) + + # Convert order_by strings to OrderBy enums + order_by_enums = [] + if order_by: + for order in order_by: + try: + order_by_enums.append(OrderBy(order)) + except ValueError: + order_by_enums.append(OrderBy.MODIFIED_TIME_DESC) + else: + order_by_enums = None + + keep_paginating = True + page_token = None + files = {} + + params = build_file_tree_request_params( + order_by_enums, + page_token, + limit, + include_shared_drives, + restrict_to_shared_drive_id, + include_organization_domain_documents, + ) + + while keep_paginating: + # Get a list of files + results = service.files().list(**params).execute() + + # Update page token + page_token = results.get("nextPageToken") + params["pageToken"] = page_token + keep_paginating = page_token is not None + + for file in results.get("files", []): + files[file["id"]] = file + + if not files: + return {"drives": []} + + file_tree = build_file_tree(files) + + drives = [] + + for drive_id, drive_files in file_tree.items(): + if drive_id == "My Drive": + drive = {"name": "My Drive", "children": drive_files} + else: + try: + drive_details = service.drives().get(driveId=drive_id).execute() + drive_name = drive_details.get("name", "Shared Drive (name unavailable)") + except HttpError as e: + drive_name = ( + f"Shared Drive (name unavailable: 'HttpError {e.status_code}: {e.reason}')" + ) + + drive = {"name": drive_name, "id": drive_id, "children": drive_files} + + drives.append(drive) + + return {"drives": drives} + except HttpError as e: + logger.error(f"Google Drive API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Drive API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_file_tree_structure: {e}") + raise e + +@click.command() +@click.option("--port", default=GOOGLE_DRIVE_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("google-drive-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="google_drive_search_documents", + description="Search for documents in the user's Google Drive.", + inputSchema={ + "type": "object", + "properties": { + "document_contains": { + "type": "array", + "items": {"type": "string"}, + "description": "Keywords or phrases that must be in the document title or body.", + }, + "document_not_contains": { + "type": "array", + "items": {"type": "string"}, + "description": "Keywords or phrases that must NOT be in the document title or body.", + }, + "search_only_in_shared_drive_id": { + "type": "string", + "description": "The ID of the shared drive to restrict the search to.", + }, + "include_shared_drives": { + "type": "boolean", + "description": "Whether to include documents from shared drives.", + "default": False, + }, + "include_organization_domain_documents": { + "type": "boolean", + "description": "Whether to include documents from the organization's domain.", + "default": False, + }, + "order_by": { + "type": "array", + "items": { + "type": "string", + "enum": ["modifiedTime desc", "modifiedTime", "createdTime desc", "createdTime", "name", "name desc"] + }, + "description": "Sort order for the results.", + }, + "limit": { + "type": "integer", + "description": "The number of documents to list.", + "default": 50, + }, + "pagination_token": { + "type": "string", + "description": "The pagination token to continue a previous request.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_DRIVE_DOCUMENT", "readOnlyHint": True} + ), + ), + types.Tool( + name="google_drive_search_and_retrieve_documents", + description="Search and retrieve the contents of Google documents in the user's Google Drive.", + inputSchema={ + "type": "object", + "properties": { + "return_format": { + "type": "string", + "enum": ["markdown", "html"], + "description": "The format of the document to return.", + "default": "markdown", + }, + "document_contains": { + "type": "array", + "items": {"type": "string"}, + "description": "Keywords or phrases that must be in the document title or body.", + }, + "document_not_contains": { + "type": "array", + "items": {"type": "string"}, + "description": "Keywords or phrases that must NOT be in the document title or body.", + }, + "search_only_in_shared_drive_id": { + "type": "string", + "description": "The ID of the shared drive to restrict the search to.", + }, + "include_shared_drives": { + "type": "boolean", + "description": "Whether to include documents from shared drives.", + "default": False, + }, + "include_organization_domain_documents": { + "type": "boolean", + "description": "Whether to include documents from the organization's domain.", + "default": False, + }, + "order_by": { + "type": "array", + "items": { + "type": "string", + "enum": ["modifiedTime desc", "modifiedTime", "createdTime desc", "createdTime", "name", "name desc"] + }, + "description": "Sort order for the results.", + }, + "limit": { + "type": "integer", + "description": "The number of documents to list.", + "default": 50, + }, + "pagination_token": { + "type": "string", + "description": "The pagination token to continue a previous request.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_DRIVE_DOCUMENT", "readOnlyHint": True} + ), + ), + types.Tool( + name="google_drive_get_file_tree_structure", + description="Get the file/folder tree structure of the user's Google Drive.", + inputSchema={ + "type": "object", + "properties": { + "include_shared_drives": { + "type": "boolean", + "description": "Whether to include shared drives in the file tree structure.", + "default": False, + }, + "restrict_to_shared_drive_id": { + "type": "string", + "description": "If provided, only include files from this shared drive in the file tree structure.", + }, + "include_organization_domain_documents": { + "type": "boolean", + "description": "Whether to include documents from the organization's domain.", + "default": False, + }, + "order_by": { + "type": "array", + "items": { + "type": "string", + "enum": ["modifiedTime desc", "modifiedTime", "createdTime desc", "createdTime", "name", "name desc"] + }, + "description": "Sort order for the results.", + }, + "limit": { + "type": "integer", + "description": "The number of files and folders to list.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_DRIVE_FILE", "readOnlyHint": True} + ), + ), + types.Tool( + name="google_drive_empty_trash", + description="Permanently delete all of the user's trashed files.", + inputSchema={ + "type": "object", + "properties": { + "drive_id": { + "type": "string", + "description": "If set, empties the trash of the provided shared drive.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_DRIVE_FILE"} + ), + ), + types.Tool( + name="google_drive_create_shared_drive", + description="Create a new shared drive.", + inputSchema={ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the shared drive to create.", + }, + "request_id": { + "type": "string", + "description": "Required. An ID, such as a random UUID, which uniquely identifies this user's request for idempotent creation of a shared drive.", + }, + }, + "required": ["name", "request_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_DRIVE_FILE"} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + if name == "google_drive_search_documents": + try: + result = await search_documents( + document_contains=arguments.get("document_contains"), + document_not_contains=arguments.get("document_not_contains"), + search_only_in_shared_drive_id=arguments.get("search_only_in_shared_drive_id"), + include_shared_drives=arguments.get("include_shared_drives", False), + include_organization_domain_documents=arguments.get("include_organization_domain_documents", False), + order_by=arguments.get("order_by"), + limit=arguments.get("limit", 50), + pagination_token=arguments.get("pagination_token"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_drive_search_and_retrieve_documents": + try: + result = await search_and_retrieve_documents( + return_format=arguments.get("return_format", "markdown"), + document_contains=arguments.get("document_contains"), + document_not_contains=arguments.get("document_not_contains"), + search_only_in_shared_drive_id=arguments.get("search_only_in_shared_drive_id"), + include_shared_drives=arguments.get("include_shared_drives", False), + include_organization_domain_documents=arguments.get("include_organization_domain_documents", False), + order_by=arguments.get("order_by"), + limit=arguments.get("limit", 50), + pagination_token=arguments.get("pagination_token"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_drive_get_file_tree_structure": + try: + result = await get_file_tree_structure( + include_shared_drives=arguments.get("include_shared_drives", False), + restrict_to_shared_drive_id=arguments.get("restrict_to_shared_drive_id"), + include_organization_domain_documents=arguments.get("include_organization_domain_documents", False), + order_by=arguments.get("order_by"), + limit=arguments.get("limit"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_drive_empty_trash": + try: + result = await empty_trash( + drive_id=arguments.get("drive_id"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_drive_create_shared_drive": + try: + name = arguments.get("name", "") + if not name: + raise ValueError("The 'name' argument is required.") + request_id = arguments.get("request_id") + if not request_id: + raise ValueError("The 'request_id' argument is required.") + result = await create_shared_drive( + name=name, + request_id=request_id + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/google_drive/utils.py b/mcp_servers/google_drive/utils.py new file mode 100644 index 00000000..312cd0f8 --- /dev/null +++ b/mcp_servers/google_drive/utils.py @@ -0,0 +1,162 @@ +# doc to html +def convert_document_to_html(document: dict) -> str: + html = ( + "" + f"{document['title']}" + f'' + "" + ) + for element in document["body"]["content"]: + html += convert_structural_element(element) + html += "" + return html + +def convert_structural_element(element: dict, wrap_paragraphs: bool = True) -> str: + if "sectionBreak" in element or "tableOfContents" in element: + return "" + + elif "paragraph" in element: + paragraph_content = "" + + prepend, append = get_paragraph_style_tags( + style=element["paragraph"]["paragraphStyle"], + wrap_paragraphs=wrap_paragraphs, + ) + + for item in element["paragraph"]["elements"]: + if "textRun" not in item: + continue + paragraph_content += extract_paragraph_content(item["textRun"]) + + if not paragraph_content: + return "" + + return f"{prepend}{paragraph_content.strip()}{append}" + + elif "table" in element: + table = [ + [ + "".join([ + convert_structural_element(element=cell_element, wrap_paragraphs=False) + for cell_element in cell["content"] + ]) + for cell in row["tableCells"] + ] + for row in element["table"]["tableRows"] + ] + return table_list_to_html(table) + + else: + raise ValueError(f"Unknown document body element type: {element}") + + +def extract_paragraph_content(text_run: dict) -> str: + content = text_run["content"] + style = text_run["textStyle"] + return apply_text_style(content, style) + + +def apply_text_style(content: str, style: dict) -> str: + content = content.rstrip("\n") + content = content.replace("\n", "
") + italic = style.get("italic", False) + bold = style.get("bold", False) + if italic: + content = f"{content}" + if bold: + content = f"{content}" + return content + + +def get_paragraph_style_tags(style: dict, wrap_paragraphs: bool = True) -> tuple[str, str]: + named_style = style["namedStyleType"] + if named_style == "NORMAL_TEXT": + return ("

", "

") if wrap_paragraphs else ("", "") + elif named_style == "TITLE": + return "

", "

" + elif named_style == "SUBTITLE": + return "

", "

" + elif named_style.startswith("HEADING_"): + try: + heading_level = int(named_style.split("_")[1]) + except ValueError: + return ("

", "

") if wrap_paragraphs else ("", "") + else: + return f"", f"" + return ("

", "

") if wrap_paragraphs else ("", "") + + +def table_list_to_html(table: list[list[str]]) -> str: + html = "" + for row in table: + html += "" + for cell in row: + if cell.endswith("
"): + cell = cell[:-4] + html += f"" + html += "" + html += "
{cell}
" + return html + +# doc to markdown +def convert_document_to_markdown(document: dict) -> str: + md = f"---\ntitle: {document['title']}\ndocumentId: {document['documentId']}\n---\n" + for element in document["body"]["content"]: + md += convert_structural_element(element) + return md + + +def convert_structural_element(element: dict) -> str: + if "sectionBreak" in element or "tableOfContents" in element: + return "" + + elif "paragraph" in element: + md = "" + prepend = get_paragraph_style_prepend_str(element["paragraph"]["paragraphStyle"]) + for item in element["paragraph"]["elements"]: + if "textRun" not in item: + continue + content = extract_paragraph_content(item["textRun"]) + md += f"{prepend}{content}" + return md + + elif "table" in element: + return convert_structural_element(element) + + else: + raise ValueError(f"Unknown document body element type: {element}") + + +def extract_paragraph_content(text_run: dict) -> str: + content = text_run["content"] + style = text_run["textStyle"] + return apply_text_style(content, style) + + +def apply_text_style(content: str, style: dict) -> str: + append = "\n" if content.endswith("\n") else "" + content = content.rstrip("\n") + italic = style.get("italic", False) + bold = style.get("bold", False) + if italic: + content = f"_{content}_" + if bold: + content = f"**{content}**" + return f"{content}{append}" + + +def get_paragraph_style_prepend_str(style: dict) -> str: + named_style = style["namedStyleType"] + if named_style == "NORMAL_TEXT": + return "" + elif named_style == "TITLE": + return "# " + elif named_style == "SUBTITLE": + return "## " + elif named_style.startswith("HEADING_"): + try: + heading_level = int(named_style.split("_")[1]) + return f"{'#' * heading_level} " + except ValueError: + return "" + return "" diff --git a/mcp_servers/google_jobs/.env.example b/mcp_servers/google_jobs/.env.example new file mode 100644 index 00000000..1ef39208 --- /dev/null +++ b/mcp_servers/google_jobs/.env.example @@ -0,0 +1,2 @@ +SERPAPI_API_KEY="YOUR_SERPAPI_KEY_HERE" # https://serpapi.com/google-jobs-api +GOOGLE_JOBS_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/google_jobs/Dockerfile b/mcp_servers/google_jobs/Dockerfile new file mode 100644 index 00000000..4803aeac --- /dev/null +++ b/mcp_servers/google_jobs/Dockerfile @@ -0,0 +1,22 @@ +FROM python:3.11-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/google_jobs/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy server code and tools +COPY mcp_servers/google_jobs/server.py . +COPY mcp_servers/google_jobs/tools/ ./tools/ + +# Expose port (change if your server uses another) +EXPOSE 5000 + +# Run the server +CMD ["python", "server.py"] diff --git a/mcp_servers/google_jobs/README.md b/mcp_servers/google_jobs/README.md new file mode 100644 index 00000000..ab29e35d --- /dev/null +++ b/mcp_servers/google_jobs/README.md @@ -0,0 +1,73 @@ +# Google Jobs MCP Server + +A Model Context Protocol (MCP) server for Google Jobs API integration. Search and access job listings using Google's Jobs API. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Google Jobs with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("GOOGLE_JOBS", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/google-jobs-mcp-server:latest + + +# Run Google Jobs MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/google-jobs-mcp-server:latest +``` + +**API Key Setup:** Get your Google API key from the [Google Cloud Console](https://console.cloud.google.com/apis/credentials) and enable the Google Jobs API. + +## šŸ› ļø Available Tools + +- **Job Search**: Search for job listings by keywords, location, and filters +- **Job Details**: Get detailed information about specific job postings +- **Company Information**: Access employer details and company profiles +- **Location-based Search**: Find jobs in specific geographic areas +- **Filter Options**: Apply various filters for salary, experience, job type + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/google_jobs/requirements.txt b/mcp_servers/google_jobs/requirements.txt new file mode 100644 index 00000000..23829d8d --- /dev/null +++ b/mcp_servers/google_jobs/requirements.txt @@ -0,0 +1,7 @@ +mcp==1.11.0 +python-dotenv>=1.0.0 +typing-extensions>=4.8.0 +starlette>=0.36.0 +uvicorn[standard]>=0.24.0 +aiohttp>=3.9.0 +click>=8.1.0 diff --git a/mcp_servers/google_jobs/server.py b/mcp_servers/google_jobs/server.py new file mode 100644 index 00000000..7160f577 --- /dev/null +++ b/mcp_servers/google_jobs/server.py @@ -0,0 +1,570 @@ +import os +import base64 +import logging +import contextlib +import json +from collections.abc import AsyncIterator +from typing import Any, Dict, List, Optional + +import click +from dotenv import load_dotenv +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response, JSONResponse +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from starlette.requests import Request + +from tools import ( + serpapi_token_context, + search_jobs, + get_job_details, + search_jobs_by_company, + search_remote_jobs, + get_job_search_suggestions, +) + +load_dotenv() + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("google-jobs-mcp-server") + +GOOGLE_JOBS_MCP_SERVER_PORT = int(os.getenv("GOOGLE_JOBS_MCP_SERVER_PORT", "5000")) + +def extract_api_key(request_or_scope) -> str: + """Extract API key from headers or environment.""" + api_key = os.getenv("API_KEY") + auth_data = None + + if not api_key: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if auth_data: + try: + # Parse the JSON auth data to extract token + auth_json = json.loads(auth_data) + api_key = auth_json.get('token') or auth_json.get('api_key') or '' + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + api_key = "" + + return api_key or "" + +@click.command() +@click.option("--port", default=GOOGLE_JOBS_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + app = Server("google-jobs-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="google_jobs_search", + description="Search for job listings on Google Jobs. Supports filtering by location, date posted, employment type, salary, company, and more.", + inputSchema={ + "type": "object", + "required": ["query"], + "properties": { + "query": { + "type": "string", + "description": "Job search query (e.g., 'software engineer', 'marketing manager')" + }, + "location": { + "type": "string", + "description": "Location to search for jobs (e.g., 'New York, NY', 'Remote', 'San Francisco')" + }, + "date_posted": { + "type": "string", + "description": "Filter by posting date", + "enum": ["today", "3days", "week", "month"] + }, + "employment_type": { + "type": "string", + "description": "Type of employment", + "enum": ["FULLTIME", "PARTTIME", "CONTRACTOR", "INTERN"] + }, + "salary_min": { + "type": "integer", + "description": "Minimum annual salary in USD" + }, + "company": { + "type": "string", + "description": "Filter by specific company name" + }, + "radius": { + "type": "integer", + "description": "Search radius in miles from the specified location (default: 25)" + }, + "start": { + "type": "integer", + "description": "Starting position for pagination (default: 0)", + "default": 0 + } + } + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_JOBS_SEARCH", "readOnlyHint": True}) + ), + types.Tool( + name="google_jobs_get_details", + description="Get comprehensive details about a specific job listing including full description, requirements, benefits, and application information.", + inputSchema={ + "type": "object", + "required": ["job_id"], + "properties": { + "job_id": { + "type": "string", + "description": "Unique job identifier from search results" + } + } + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_JOBS_DETAILS", "readOnlyHint": True}) + ), + types.Tool( + name="google_jobs_search_by_company", + description="Search for all job openings at a specific company. Returns current job listings with location and role information.", + inputSchema={ + "type": "object", + "required": ["company_name"], + "properties": { + "company_name": { + "type": "string", + "description": "Name of the company to search for jobs" + }, + "location": { + "type": "string", + "description": "Optional location filter (e.g., 'New York, NY', 'Remote')" + }, + "employment_type": { + "type": "string", + "description": "Type of employment", + "enum": ["FULLTIME", "PARTTIME", "CONTRACTOR", "INTERN"] + }, + "start": { + "type": "integer", + "description": "Starting position for pagination (default: 0)", + "default": 0 + } + } + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_JOBS_SEARCH", "readOnlyHint": True}) + ), + types.Tool( + name="google_jobs_search_remote", + description="Search specifically for remote job opportunities. Filters for jobs that can be done remotely or are explicitly marked as remote positions.", + inputSchema={ + "type": "object", + "required": ["query"], + "properties": { + "query": { + "type": "string", + "description": "Job search query (e.g., 'software engineer', 'marketing manager')" + }, + "employment_type": { + "type": "string", + "description": "Type of employment", + "enum": ["FULLTIME", "PARTTIME", "CONTRACTOR", "INTERN"] + }, + "date_posted": { + "type": "string", + "description": "Filter by posting date", + "enum": ["today", "3days", "week", "month"] + }, + "salary_min": { + "type": "integer", + "description": "Minimum annual salary in USD" + }, + "start": { + "type": "integer", + "description": "Starting position for pagination (default: 0)", + "default": 0 + } + } + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_JOBS_SEARCH", "readOnlyHint": True}) + ), + types.Tool( + name="google_jobs_get_suggestions", + description="Get search suggestions and related job titles based on a query. Useful for discovering similar roles or refining search terms.", + inputSchema={ + "type": "object", + "required": ["query"], + "properties": { + "query": { + "type": "string", + "description": "Base job query to get suggestions for" + } + } + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_JOBS_SUGGESTION", "readOnlyHint": True}) + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + if name == "google_jobs_search": + query = arguments.get("query") + location = arguments.get("location") + date_posted = arguments.get("date_posted") + employment_type = arguments.get("employment_type") + salary_min = arguments.get("salary_min") + company = arguments.get("company") + radius = arguments.get("radius") + start = arguments.get("start", 0) + + if not query: + return [ + types.TextContent( + type="text", + text="Error: query parameter is required", + ) + ] + try: + result = await search_jobs( + query=query, + location=location, + date_posted=date_posted, + employment_type=employment_type, + salary_min=salary_min, + company=company, + radius=radius, + start=start + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_jobs_get_details": + job_id = arguments.get("job_id") + + if not job_id: + return [ + types.TextContent( + type="text", + text="Error: job_id parameter is required", + ) + ] + try: + result = await get_job_details(job_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_jobs_search_by_company": + company_name = arguments.get("company_name") + location = arguments.get("location") + employment_type = arguments.get("employment_type") + start = arguments.get("start", 0) + + if not company_name: + return [ + types.TextContent( + type="text", + text="Error: company_name parameter is required", + ) + ] + try: + result = await search_jobs_by_company( + company_name=company_name, + location=location, + employment_type=employment_type, + start=start + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_jobs_search_remote": + query = arguments.get("query") + employment_type = arguments.get("employment_type") + date_posted = arguments.get("date_posted") + salary_min = arguments.get("salary_min") + start = arguments.get("start", 0) + + if not query: + return [ + types.TextContent( + type="text", + text="Error: query parameter is required", + ) + ] + try: + result = await search_remote_jobs( + query=query, + employment_type=employment_type, + date_posted=date_posted, + salary_min=salary_min, + start=start + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_jobs_get_suggestions": + query = arguments.get("query") + + if not query: + return [ + types.TextContent( + type="text", + text="Error: query parameter is required", + ) + ] + try: + result = await get_job_search_suggestions(query) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + else: + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + sse = SseServerTransport("/messages") + + async def handle_sse(request: Request): + """Handle SSE connections.""" + logger.info("Handling SSE connection") + + auth_token = extract_api_key(request) + + token = serpapi_token_context.set(auth_token or "") + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + except Exception as e: + logger.exception(f"Error in SSE handler: {e}") + return Response(f"Internal server error: {str(e)}", status_code=500) + finally: + serpapi_token_context.reset(token) + + return Response() + + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + """Handle StreamableHTTP requests.""" + logger.info(f"Handling StreamableHTTP request: {scope['method']} {scope['path']}") + + if scope["method"] != "POST": + await send({ + "type": "http.response.start", + "status": 405, + "headers": [ + [b"content-type", b"application/json"], + [b"allow", b"POST"] + ], + }) + await send({ + "type": "http.response.body", + "body": json.dumps({ + "error": "Method not allowed. Only POST requests are supported for MCP endpoints." + }).encode(), + }) + return + + auth_token = extract_api_key(scope) + + token = serpapi_token_context.set(auth_token or "") + try: + await session_manager.handle_request(scope, receive, send) + except Exception as e: + logger.exception(f"Error in StreamableHTTP handler: {e}") + try: + await send({ + "type": "http.response.start", + "status": 500, + "headers": [[b"content-type", b"application/json"]], + }) + await send({ + "type": "http.response.body", + "body": json.dumps({"error": f"Internal server error: {str(e)}"}).encode(), + }) + except Exception: + logger.warning("Could not send error response, connection may be closed") + finally: + serpapi_token_context.reset(token) + + async def handle_mcp_info(request: Request) -> JSONResponse: + """Handle GET requests to MCP endpoint with information.""" + return JSONResponse({ + "error": "Method not allowed", + "message": "This endpoint only accepts POST requests for MCP protocol communication.", + "supported_methods": ["POST"], + "usage": "Send MCP protocol messages via POST requests to this endpoint." + }, status_code=405) + + async def handle_root(request: Request) -> JSONResponse: + """Handle root endpoint with server info.""" + return JSONResponse({ + "name": "google-jobs-mcp-server", + "version": "1.0.0", + "description": "MCP server for Google Jobs search functionality", + "endpoints": { + "sse": "/sse", + "streamable_http": "/mcp", + "messages": "/messages" + }, + "tools": [ + "google_jobs_search", + "google_jobs_get_details", + "google_jobs_search_by_company", + "google_jobs_search_remote", + "google_jobs_get_suggestions" + ], + "status": "running" + }) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + try: + async with session_manager.run(): + logger.info("Application started with dual transports!") + yield + except Exception as e: + logger.exception(f"Error in lifespan manager: {e}") + raise + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # Root endpoint + Route("/", endpoint=handle_root, methods=["GET"]), + + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages", app=sse.handle_post_message), + + # StreamableHTTP routes + Route("/mcp", endpoint=handle_mcp_info, methods=["GET"]), + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - Root endpoint: http://localhost:{port}/") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + logger.info(f" - Messages endpoint: http://localhost:{port}/messages") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() + diff --git a/mcp_servers/google_jobs/tools/__init__.py b/mcp_servers/google_jobs/tools/__init__.py new file mode 100644 index 00000000..4da3447d --- /dev/null +++ b/mcp_servers/google_jobs/tools/__init__.py @@ -0,0 +1,20 @@ +from .jobs import ( + search_jobs, + get_job_details, + search_jobs_by_company, + search_remote_jobs, + get_job_search_suggestions +) +from .base import serpapi_token_context + +__all__ = [ + # Jobs + "search_jobs", + "get_job_details", + "search_jobs_by_company", + "search_remote_jobs", + "get_job_search_suggestions", + + # Base + "serpapi_token_context", +] diff --git a/mcp_servers/google_jobs/tools/base.py b/mcp_servers/google_jobs/tools/base.py new file mode 100644 index 00000000..05d44ae2 --- /dev/null +++ b/mcp_servers/google_jobs/tools/base.py @@ -0,0 +1,96 @@ +import os +import logging +import ssl +from typing import Any, Dict, Optional +from contextvars import ContextVar +import aiohttp + +logger = logging.getLogger(__name__) + +SERPAPI_BASE_URL = "https://serpapi.com/search" + +serpapi_token_context: ContextVar[str] = ContextVar('serpapi_token') + + +def get_serpapi_access_token() -> str: + """Get the SerpApi access token from context or environment.""" + try: + return serpapi_token_context.get() + except LookupError: + token = os.getenv("SERPAPI_API_KEY") + if not token: + raise RuntimeError("SerpApi API key not found in request context or environment") + return token + +def _get_serpapi_headers() -> Dict[str, str]: + """Create standard headers for SerpApi calls.""" + return { + "User-Agent": "MCP Google Jobs Server", + "Accept": "application/json" + } + +def _get_ssl_context(): + """Create secure SSL context.""" + return ssl.create_default_context() + +async def make_serpapi_request( + params: Dict[str, Any], + expect_empty_response: bool = False +) -> Any: + """ + Makes an HTTP request to the SerpApi. + + Args: + params: Query parameters for the API request + expect_empty_response: Whether to expect an empty response + + Returns: + Response data as dict, or None for empty responses + """ + api_key = get_serpapi_access_token() + params["api_key"] = api_key + params["engine"] = params.get("engine", "google_jobs") + + url = SERPAPI_BASE_URL + headers = _get_serpapi_headers() + + connector = aiohttp.TCPConnector(ssl=_get_ssl_context()) + async with aiohttp.ClientSession(headers=headers, connector=connector) as session: + try: + async with session.get(url, params=params) as response: + response.raise_for_status() + + if expect_empty_response: + if response.status in [200, 201, 204]: + return None + else: + logger.warning(f"Expected empty response for SerpApi request, but got status {response.status}") + try: + return await response.json() + except aiohttp.ContentTypeError: + return await response.text() + else: + if 'application/json' in response.headers.get('Content-Type', ''): + data = await response.json() + + if "error" in data: + raise RuntimeError(f"SerpApi error: {data['error']}") + + return data + else: + text_content = await response.text() + logger.warning(f"Received non-JSON response from SerpApi: {text_content[:100]}...") + return {"raw_content": text_content} + + except aiohttp.ClientResponseError as e: + logger.error(f"SerpApi request failed: {e.status} {e.message} for {url}") + error_details = e.message + try: + error_body = await e.response.json() + error_details = f"{e.message} - {error_body}" + except Exception: + pass + raise RuntimeError(f"SerpApi Error ({e.status}): {error_details}") from e + except Exception as e: + logger.error(f"An unexpected error occurred during SerpApi request: {e}") + raise RuntimeError(f"Unexpected error during API call to SerpApi") from e diff --git a/mcp_servers/google_jobs/tools/jobs.py b/mcp_servers/google_jobs/tools/jobs.py new file mode 100644 index 00000000..66efb3c1 --- /dev/null +++ b/mcp_servers/google_jobs/tools/jobs.py @@ -0,0 +1,287 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import make_serpapi_request + +logger = logging.getLogger(__name__) + +async def search_jobs( + query: str, + location: Optional[str] = None, + date_posted: Optional[str] = None, + employment_type: Optional[str] = None, + salary_min: Optional[int] = None, + company: Optional[str] = None, + radius: Optional[int] = None, + start: int = 0 +) -> Dict[str, Any]: + """Search for jobs on Google Jobs.""" + logger.info(f"Executing tool: search_jobs with query: {query}") + try: + params = {"q": query} + + if location: + params["location"] = location + if date_posted: + params["date_posted"] = date_posted + if employment_type: + params["employment_type"] = employment_type + if salary_min: + params["salary_min"] = salary_min + if company: + params["company"] = company + if radius: + params["radius"] = radius + if start > 0: + params["start"] = start + + result = await make_serpapi_request(params) + + jobs = result.get("jobs_results", []) + total_results = result.get("search_information", {}).get("total_results", 0) + + if not jobs: + return { + "message": f"No job listings found for query: '{query}'", + "total_results": 0, + "jobs": [] + } + + formatted_jobs = [] + for job in jobs[:10]: # limiting to first 10 results + job_info = { + "title": job.get("title", "N/A"), + "company": job.get("company_name", "N/A"), + "location": job.get("location", "N/A"), + "description_snippet": job.get("description", "N/A")[:200] + "..." if job.get("description") else "N/A", + "posted_date": job.get("detected_extensions", {}).get("posted_at", "N/A"), + "job_id": job.get("job_id", "N/A"), + "apply_options": [opt.get("link", "") for opt in job.get("apply_options", [])] + } + formatted_jobs.append(job_info) + + response = { + "total_results": total_results, + "showing_results": len(formatted_jobs), + "search_query": query, + "location_filter": location, + "jobs": formatted_jobs + } + + return response + + except Exception as e: + logger.exception(f"Error executing tool search_jobs: {e}") + return { + "error": "Job search failed", + "query": query, + "exception": str(e) + } + +async def get_job_details(job_id: str) -> Dict[str, Any]: + """Get detailed information about a specific job listing.""" + logger.info(f"Executing tool: get_job_details with job_id: {job_id}") + try: + params = { + "engine": "google_jobs_listing", + "q": job_id + } + + result = await make_serpapi_request(params) + + # Extract job details + job_details = result.get("job", {}) + + if not job_details: + return { + "error": f"No job details found for job ID: {job_id}", + "job_id": job_id + } + + formatted_details = { + "job_id": job_id, + "title": job_details.get("title", "N/A"), + "company": job_details.get("company_name", "N/A"), + "location": job_details.get("location", "N/A"), + "employment_type": job_details.get("detected_extensions", {}).get("employment_type", "N/A"), + "posted_date": job_details.get("detected_extensions", {}).get("posted_at", "N/A"), + "salary": job_details.get("detected_extensions", {}).get("salary", "N/A"), + "description": job_details.get("description", "N/A"), + "qualifications": job_details.get("qualifications", []), + "responsibilities": job_details.get("responsibilities", []), + "apply_options": job_details.get("apply_options", []) + } + + return formatted_details + + except Exception as e: + logger.exception(f"Error executing tool get_job_details: {e}") + return { + "error": "Failed to get job details", + "job_id": job_id, + "exception": str(e) + } + +async def search_jobs_by_company( + company_name: str, + location: Optional[str] = None, + employment_type: Optional[str] = None, + start: int = 0 +) -> Dict[str, Any]: + """Search for all job openings at a specific company.""" + logger.info(f"Executing tool: search_jobs_by_company with company: {company_name}") + try: + # Use company name as both query and company filter + query = f"jobs at {company_name}" + + params = {"q": query} + + if location: + params["location"] = location + if employment_type: + params["employment_type"] = employment_type + if company_name: + params["company"] = company_name + if start > 0: + params["start"] = start + + result = await make_serpapi_request(params) + + jobs = result.get("jobs_results", []) + + if not jobs: + return { + "message": f"No job listings found for company: {company_name}", + "company": company_name, + "jobs": [] + } + + formatted_jobs = [] + for job in jobs: + job_info = { + "title": job.get("title", "N/A"), + "location": job.get("location", "N/A"), + "employment_type": job.get("detected_extensions", {}).get("employment_type", "N/A"), + "posted_date": job.get("detected_extensions", {}).get("posted_at", "N/A"), + "job_id": job.get("job_id", "N/A"), + "description_snippet": job.get("description", "N/A")[:150] + "..." if job.get("description") else "N/A" + } + formatted_jobs.append(job_info) + + response = { + "company": company_name, + "total_jobs_found": len(formatted_jobs), + "location_filter": location, + "jobs": formatted_jobs + } + + return response + + except Exception as e: + logger.exception(f"Error executing tool search_jobs_by_company: {e}") + return { + "error": "Company job search failed", + "company_name": company_name, + "exception": str(e) + } + +async def search_remote_jobs( + query: str, + employment_type: Optional[str] = None, + date_posted: Optional[str] = None, + salary_min: Optional[int] = None, + start: int = 0 +) -> Dict[str, Any]: + """Search specifically for remote job opportunities.""" + logger.info(f"Executing tool: search_remote_jobs with query: {query}") + try: + # Modify query to include remote keywords + remote_query = f"{query} remote" + + params = {"q": remote_query, "location": "Remote"} + + if employment_type: + params["employment_type"] = employment_type + if date_posted: + params["date_posted"] = date_posted + if salary_min: + params["salary_min"] = salary_min + if start > 0: + params["start"] = start + + result = await make_serpapi_request(params) + + jobs = result.get("jobs_results", []) + + if not jobs: + return { + "message": f"No remote job listings found for query: '{query}'", + "search_query": query, + "jobs": [] + } + + remote_jobs = [] + for job in jobs: + location = job.get("location", "").lower() + title = job.get("title", "").lower() + description = job.get("description", "").lower() + + if any(remote_keyword in location or remote_keyword in title or remote_keyword in description + for remote_keyword in ["remote", "work from home", "anywhere", "virtual"]): + job_info = { + "title": job.get("title", "N/A"), + "company": job.get("company_name", "N/A"), + "location": job.get("location", "N/A"), + "employment_type": job.get("detected_extensions", {}).get("employment_type", "N/A"), + "posted_date": job.get("detected_extensions", {}).get("posted_at", "N/A"), + "salary": job.get("detected_extensions", {}).get("salary", "N/A"), + "job_id": job.get("job_id", "N/A"), + "description_snippet": job.get("description", "N/A")[:200] + "..." if job.get("description") else "N/A" + } + remote_jobs.append(job_info) + + response = { + "search_query": query, + "remote_jobs_found": len(remote_jobs), + "jobs": remote_jobs + } + + return response + + except Exception as e: + logger.exception(f"Error executing tool search_remote_jobs: {e}") + return { + "error": "Remote job search failed", + "query": query, + "exception": str(e) + } + +async def get_job_search_suggestions(query: str) -> Dict[str, Any]: + """Get search suggestions and related job titles based on a query.""" + logger.info(f"Executing tool: get_job_search_suggestions with query: {query}") + try: + params = {"q": query} + + result = await make_serpapi_request(params) + + # Extract job titles and companies for suggestions + jobs = result.get("jobs_results", []) + related_searches = result.get("related_searches", []) + + suggestions = { + "original_query": query, + "related_searches": [search.get("query", "") for search in related_searches], + "popular_job_titles": list(set([job.get("title", "") for job in jobs[:10] if job.get("title")])), + "companies_hiring": list(set([job.get("company_name", "") for job in jobs[:10] if job.get("company_name")])), + "common_locations": list(set([job.get("location", "") for job in jobs[:10] if job.get("location")])) + } + + return suggestions + + except Exception as e: + logger.exception(f"Error executing tool get_job_search_suggestions: {e}") + return { + "error": "Failed to get search suggestions", + "query": query, + "exception": str(e) + } + diff --git a/mcp_servers/google_meet/.env.example b/mcp_servers/google_meet/.env.example new file mode 100644 index 00000000..73c10c7f --- /dev/null +++ b/mcp_servers/google_meet/.env.example @@ -0,0 +1,4 @@ +GOOGLE_MEET_MCP_SERVER_PORT=5000 +LOG_LEVEL=INFO +MEET_API_DISABLED_ENV=MEET_API_V2_DISABLED +MEET_API_BASE=https://meet.googleapis.com/v2 diff --git a/mcp_servers/google_meet/README.md b/mcp_servers/google_meet/README.md new file mode 100644 index 00000000..57e05353 --- /dev/null +++ b/mcp_servers/google_meet/README.md @@ -0,0 +1,106 @@ +# Google Meet MCP Server + +Lightweight MCP server for Google Meet (Google Calendar) events: create, list (upcoming & past), update, delete, fetch details + past attendees. Sends invites / update emails when you want. + +## Tools + +create: google_meet_create_meet (notify_attendees default true) + +list upcoming: google_meet_list_meetings + +list past: google_meet_list_past_meetings + +details: google_meet_get_meeting_details + +update: google_meet_update_meeting (change detection + optional notify) + +delete: google_meet_delete_meeting + +past attendees: google_meet_get_past_meeting_attendees + +## Quick Start + +```bash +uv venv +./.venv/Scripts/Activate.ps1 # (PowerShell) | source .venv/bin/activate (bash) +uv pip install -r requirements.txt +set GOOGLE_MEET_MCP_SERVER_PORT=5000 +set AUTH_DATA={"access_token":"ya29.your_token"} +uv run server.py --stdio # stdio mode (Claude Desktop) +# OR HTTP/SSE +uv run server.py --port 5000 --log-level INFO +``` + +HTTP endpoints: + +SSE: + +StreamableHTTP: + +## Auth + +Two ways: + +1. Stdio: AUTH_DATA env JSON {"access_token":"..."} +2. HTTP/SSE: header x-auth-data = base64(JSON with access_token) + +Scopes: needs calendar events write (e.g. ). + +## Create Example + +```json +{ + "name": "google_meet_create_meet", + "arguments": { + "summary": "Team Sync", + "start_time": "2025-09-05T10:00:00Z", + "end_time": "2025-09-05T10:30:00Z", + "attendees": ["a@example.com","b@example.com"], + "description": "Daily standup", + "notify_attendees": true + } +} +``` + +## Past Meetings + +```json +{ + "name": "google_meet_list_past_meetings", + "arguments": {"max_results": 5} +} +``` + +## Update Meeting (resend notifications) + +```json +{ + "name": "google_meet_update_meeting", + "arguments": { + "event_id": "abc123", + "start_time": "2025-09-05T11:00:00Z", + "end_time": "2025-09-05T11:30:00Z", + "notify_attendees": true + } +} +``` + +## Behavior Notes + +- Invitations: create/update uses sendUpdates=all when notify_attendees true. +- Change detection: update skips API call + emails if nothing changed. +- Meet detection: conferenceData.entryPoints OR hangoutLink. +- Past list: excludes all‑day events; 30‑day default lookback. +- Attendees fetch: only allowed for ended events. + +## Package Layout + +```text +server.py # tooling + transports +tools/ + base.py # core logic + utils.py # validation + shaping +``` +## License + +MIT diff --git a/mcp_servers/google_meet/dockerfile b/mcp_servers/google_meet/dockerfile new file mode 100644 index 00000000..39528fec --- /dev/null +++ b/mcp_servers/google_meet/dockerfile @@ -0,0 +1,29 @@ +FROM python:3.12-slim AS base + +ENV PYTHONUNBUFFERED=1 \ + PIP_NO_CACHE_DIR=1 \ + PYTHONDONTWRITEBYTECODE=1 + +WORKDIR /app + +RUN apt-get update \ + && apt-get install -y --no-install-recommends ca-certificates curl \ + && rm -rf /var/lib/apt/lists/* + +COPY requirements.txt ./mcp_servers/google_meet/requirements.txt +WORKDIR /app/mcp_servers/google_meet +RUN pip install --no-cache-dir -r requirements.txt + +COPY . /app + +RUN useradd -m appuser && chown -R appuser /app +USER appuser + +ENV GOOGLE_MEET_MCP_SERVER_PORT=5000 \ + LOG_LEVEL=INFO + +EXPOSE 5000 + +HEALTHCHECK --interval=30s --timeout=3s --retries=3 CMD ["python","-c","import socket,os,sys;port=int(os.getenv('GOOGLE_MEET_MCP_SERVER_PORT','5000'));s=socket.socket();s.settimeout(2);\n\nimport contextlib;\ntry:\n s.connect(('127.0.0.1',port))\nexcept Exception as e:\n print('healthcheck fail',e);sys.exit(1)\nfinally:\n s.close();print('ok')"] + +ENTRYPOINT ["python","server.py","--port","5000","--log-level","INFO"] diff --git a/mcp_servers/google_meet/pyproject.toml b/mcp_servers/google_meet/pyproject.toml new file mode 100644 index 00000000..468c2d5a --- /dev/null +++ b/mcp_servers/google_meet/pyproject.toml @@ -0,0 +1,33 @@ +[project] +name = "google-meet-mcp-server" +version = "0.1.0" +description = "MCP server for Google Meet video conferencing" +readme = "README.md" +requires-python = ">=3.10" +dependencies = [ + "mcp==1.11.0", + "pydantic", + "fastapi", + "uvicorn[standard]", + "python-dotenv", + "typing-extensions", + "google-auth", + "google-auth-oauthlib", + "google-auth-httplib2", + "google-api-python-client", + "click", + "starlette", +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["."] + +[tool.uv] +dev-dependencies = [] + +[project.scripts] +google-meet-mcp-server = "server:main" diff --git a/mcp_servers/google_meet/requirements.txt b/mcp_servers/google_meet/requirements.txt new file mode 100644 index 00000000..82457943 --- /dev/null +++ b/mcp_servers/google_meet/requirements.txt @@ -0,0 +1,12 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +google-auth +google-auth-oauthlib +google-auth-httplib2 +google-api-python-client +click +starlette \ No newline at end of file diff --git a/mcp_servers/google_meet/server.py b/mcp_servers/google_meet/server.py new file mode 100644 index 00000000..b9855b15 --- /dev/null +++ b/mcp_servers/google_meet/server.py @@ -0,0 +1,416 @@ +import contextlib +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.stdio import stdio_server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv +from googleapiclient.errors import HttpError + +try: # Support running as module or script + from .tools.utils import ( + ValidationError, + validate_time_window, + validate_attendees, + parse_rfc3339, + success, + failure, + shape_meeting, + http_error_to_message, + ) +except ImportError: # Fallback when executed directly + from tools.utils import ( + ValidationError, + validate_time_window, + validate_attendees, + parse_rfc3339, + success, + failure, + shape_meeting, + http_error_to_message, + ) + +logger = logging.getLogger(__name__) +load_dotenv() +GOOGLE_MEET_MCP_SERVER_PORT = int(os.getenv("GOOGLE_MEET_MCP_SERVER_PORT", "5000")) + +# Import core logic from tools.base +try: + from .tools.base import ( + auth_token_context, + extract_access_token, + get_auth_token, + create_meet, + list_meetings, + list_past_meetings, + get_meeting_details, + update_meeting, + delete_meeting, + get_past_meeting_attendees, + ) + from .tools import meet_api as meet_v2 +except ImportError: + from tools.base import ( + auth_token_context, + extract_access_token, + get_auth_token, + create_meet, + list_meetings, + list_past_meetings, + get_meeting_details, + update_meeting, + delete_meeting, + get_past_meeting_attendees, + ) + import tools.meet_api as meet_v2 + +## Core tool implementations now imported from tools.base + +@click.command() +@click.option("--port", default=GOOGLE_MEET_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option("--log-level", default="INFO", help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)") +@click.option("--json-response", is_flag=True, default=False, help="Enable JSON responses for StreamableHTTP instead of SSE streams") +@click.option("--stdio", is_flag=True, default=False, help="Run with stdio transport instead of HTTP") +def main(port: int, log_level: str, json_response: bool, stdio: bool) -> int: + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + app = Server("google-meet-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + tools = [ + types.Tool( + name="google_meet_create_meet", + description="Create a new Google Meet meeting (via Calendar event)", + inputSchema={ + "type": "object", + "required": ["summary", "start_time", "end_time", "attendees"], + "properties": { + "summary": {"type": "string", "description": "Meeting title"}, + "start_time": {"type": "string", "description": "ISO RFC3339 datetime"}, + "end_time": {"type": "string", "description": "ISO RFC3339 datetime"}, + "attendees": {"type": "array", "items": {"type": "string"}, "description": "List of attendee email addresses"}, + "description": {"type": "string", "description": "Meeting description"}, + "notify_attendees": {"type": "boolean", "description": "Send calendar invitations to attendees (default true)", "default": True}, + }, + }, + ), + types.Tool( + name="google_meet_list_meetings", + description="List upcoming Google Meet meetings from the user's calendar", + inputSchema={ + "type": "object", + "properties": { + "max_results": {"type": "integer", "description": "Maximum number of meetings to return (1-100)", "default": 10}, + "start_after": {"type": "string", "description": "Only meetings starting after this RFC3339 UTC time"}, + "end_before": {"type": "string", "description": "Only meetings starting before this RFC3339 UTC time"}, + }, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_MEET_MEETING", "readOnlyHint": True}), + ), + types.Tool( + name="google_meet_list_past_meetings", + description="List past Google Meet meetings (already ended) in descending recency", + inputSchema={ + "type": "object", + "properties": { + "max_results": {"type": "integer", "description": "Maximum number of past meetings to return (1-100)", "default": 10}, + "since": {"type": "string", "description": "Only include meetings starting after this RFC3339 UTC time"}, + }, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_MEET_MEETING", "readOnlyHint": True}), + ), + types.Tool( + name="google_meet_get_meeting_details", + description="Get details of a specific Google Meet meeting", + inputSchema={ + "type": "object", + "required": ["event_id"], + "properties": { + "event_id": {"type": "string", "description": "The calendar event ID of the meeting"}, + }, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_MEET_MEETING", "readOnlyHint": True}), + ), + types.Tool( + name="google_meet_update_meeting", + description="Update an existing Google Meet meeting", + inputSchema={ + "type": "object", + "required": ["event_id"], + "properties": { + "event_id": {"type": "string", "description": "The calendar event ID of the meeting"}, + "summary": {"type": "string", "description": "New meeting title"}, + "start_time": {"type": "string", "description": "New start time (ISO RFC3339)"}, + "end_time": {"type": "string", "description": "New end time (ISO RFC3339)"}, + "attendees": {"type": "array", "items": {"type": "string"}, "description": "New list of attendee email addresses"}, + "description": {"type": "string", "description": "New meeting description"}, + "notify_attendees": {"type": "boolean", "description": "Send updated invitations / notifications (default true)", "default": True}, + }, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_MEET_MEETING"}), + ), + types.Tool( + name="google_meet_delete_meeting", + description="Delete a Google Meet meeting", + inputSchema={ + "type": "object", + "required": ["event_id"], + "properties": { + "event_id": {"type": "string", "description": "The calendar event ID of the meeting to delete"}, + }, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_MEET_MEETING"}), + ), + types.Tool( + name="google_meet_get_past_meeting_attendees", + description="Get attendees (with response statuses) for a past meeting (fails if meeting not ended)", + inputSchema={ + "type": "object", + "required": ["event_id"], + "properties": { + "event_id": {"type": "string", "description": "The calendar event ID of the past meeting"}, + }, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_MEET_MEETING", "readOnlyHint": True}), + ), + ] + tools.extend([ + types.Tool( + name="google_meet_v2_create_instant", + description="Create an instant ad-hoc Google Meet (Meet API v2, Workspace/EDU only)", + inputSchema={"type": "object", "properties": {}}, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_MEET_V2"}), + ), + types.Tool( + name="google_meet_v2_get_meeting", + description="Get Meet API v2 meeting (space) details (Workspace/EDU)", + inputSchema={ + "type": "object", + "required": ["space_id"], + "properties": {"space_id": {"type": "string", "description": "Space ID or spaces/"}}, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_MEET_V2", "readOnlyHint": True}), + ), + types.Tool( + name="google_meet_v2_list_meetings", + description="List Meet API v2 meetings/spaces (Workspace/EDU)", + inputSchema={ + "type": "object", + "properties": {"max_results": {"type": "integer", "default": 10, "description": "1-100"}}, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_MEET_V2", "readOnlyHint": True}), + ), + types.Tool( + name="google_meet_v2_get_participants", + description="Get participants for a Meet API v2 meeting (Workspace/EDU)", + inputSchema={ + "type": "object", + "required": ["space_id"], + "properties": { + "space_id": {"type": "string", "description": "Space ID or spaces/"}, + "max_results": {"type": "integer", "default": 50, "description": "1-300"}, + }, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_MEET_V2", "readOnlyHint": True}), + ), + ]) + return tools + @app.call_tool() + async def call_tool(name: str, arguments: dict) -> list[types.TextContent]: + if name == "google_meet_create_meet": + summary = arguments.get("summary") + start_time = arguments.get("start_time") + end_time = arguments.get("end_time") + attendees = arguments.get("attendees", []) + description = arguments.get("description", "") + notify_attendees = arguments.get("notify_attendees", True) + result = await create_meet(summary, start_time, end_time, attendees, description, notify_attendees) + return [types.TextContent(type="text", text=json.dumps(result))] + elif name == "google_meet_list_meetings": + max_results = arguments.get("max_results", 10) + start_after = arguments.get("start_after") + end_before = arguments.get("end_before") + result = await list_meetings(max_results, start_after, end_before) + return [types.TextContent(type="text", text=json.dumps(result))] + elif name == "google_meet_list_past_meetings": + max_results = arguments.get("max_results", 10) + since = arguments.get("since") + result = await list_past_meetings(max_results, since) + return [types.TextContent(type="text", text=json.dumps(result))] + elif name == "google_meet_get_meeting_details": + event_id = arguments.get("event_id") + if not event_id: + return [types.TextContent(type="text", text=json.dumps(failure("event_id parameter is required")))] + result = await get_meeting_details(event_id) + return [types.TextContent(type="text", text=json.dumps(result))] + elif name == "google_meet_update_meeting": + event_id = arguments.get("event_id") + if not event_id: + return [types.TextContent(type="text", text=json.dumps(failure("event_id parameter is required")))] + summary = arguments.get("summary") + start_time = arguments.get("start_time") + end_time = arguments.get("end_time") + attendees = arguments.get("attendees") + description = arguments.get("description") + notify_attendees = arguments.get("notify_attendees", True) + result = await update_meeting(event_id, summary, start_time, end_time, attendees, description, notify_attendees) + return [types.TextContent(type="text", text=json.dumps(result))] + elif name == "google_meet_delete_meeting": + event_id = arguments.get("event_id") + if not event_id: + return [types.TextContent(type="text", text=json.dumps(failure("event_id parameter is required")))] + result = await delete_meeting(event_id) + return [types.TextContent(type="text", text=json.dumps(result))] + elif name == "google_meet_get_past_meeting_attendees": + event_id = arguments.get("event_id") + if not event_id: + return [types.TextContent(type="text", text=json.dumps(failure("event_id parameter is required")))] + result = await get_past_meeting_attendees(event_id) + return [types.TextContent(type="text", text=json.dumps(result))] + # ----------------Meet API v2 tools----------------- + elif name == "google_meet_v2_create_instant": + try: + result = await meet_v2.create_instant_meeting() + except Exception as e: # Safety net + logger.exception("meet_v2_create_instant unexpected error=%s", e) + result = failure("Unexpected server error", code="internal_error") + return [types.TextContent(type="text", text=json.dumps(result))] + elif name == "google_meet_v2_get_meeting": + space_id = arguments.get("space_id") + if not space_id: + return [types.TextContent(type="text", text=json.dumps(failure("space_id parameter is required")))] + try: + result = await meet_v2.get_meeting(space_id) + except Exception as e: + logger.exception("meet_v2_get_meeting unexpected error=%s", e) + result = failure("Unexpected server error", code="internal_error") + return [types.TextContent(type="text", text=json.dumps(result))] + elif name == "google_meet_v2_list_meetings": + max_results = arguments.get("max_results", 10) + try: + result = await meet_v2.list_meetings(max_results) + except Exception as e: + logger.exception("meet_v2_list_meetings unexpected error=%s", e) + result = failure("Unexpected server error", code="internal_error") + return [types.TextContent(type="text", text=json.dumps(result))] + elif name == "google_meet_v2_get_participants": + space_id = arguments.get("space_id") + if not space_id: + return [types.TextContent(type="text", text=json.dumps(failure("space_id parameter is required")))] + max_results = arguments.get("max_results", 50) + try: + result = await meet_v2.get_participants(space_id, max_results) + except Exception as e: + logger.exception("meet_v2_get_participants unexpected error=%s", e) + result = failure("Unexpected server error", code="internal_error") + return [types.TextContent(type="text", text=json.dumps(result))] + return [types.TextContent(type="text", text=json.dumps(failure(f"Unknown tool: {name}", code="unknown_tool")))] + + if stdio: + logger.info("Starting Google Meet MCP server with stdio transport") + import asyncio + async def run_stdio(): + auth_token = extract_access_token(None) + if not auth_token: + logger.error("No access token found in AUTH_DATA environment variable") + return + + token = auth_token_context.set(auth_token) + try: + async with stdio_server() as (read_stream, write_stream): + await app.run(read_stream, write_stream, app.create_initialization_options()) + finally: + auth_token_context.reset(token) + asyncio.run(run_stdio()) + return 0 + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + auth_token = extract_access_token(request) + + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + auth_token = extract_access_token(scope) + + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() diff --git a/mcp_servers/google_meet/tools/__init__.py b/mcp_servers/google_meet/tools/__init__.py new file mode 100644 index 00000000..7571ea74 --- /dev/null +++ b/mcp_servers/google_meet/tools/__init__.py @@ -0,0 +1,27 @@ +from .base import ( + auth_token_context, + extract_access_token, + get_auth_token, + create_meet, + list_meetings, + list_past_meetings, + get_meeting_details, + update_meeting, + delete_meeting, + get_past_meeting_attendees, +) +from . import utils + +__all__ = [ + "auth_token_context", + "extract_access_token", + "get_auth_token", + "create_meet", + "list_meetings", + "get_meeting_details", + "list_past_meetings", + "get_past_meeting_attendees", + "update_meeting", + "delete_meeting", + "utils", +] diff --git a/mcp_servers/google_meet/tools/base.py b/mcp_servers/google_meet/tools/base.py new file mode 100644 index 00000000..75e3236b --- /dev/null +++ b/mcp_servers/google_meet/tools/base.py @@ -0,0 +1,505 @@ +from __future__ import annotations + +import base64 +import datetime +import json +import logging +import os +from contextvars import ContextVar +from typing import Any, Dict, List + +from google.oauth2.credentials import Credentials +from googleapiclient.discovery import build +from googleapiclient.errors import HttpError + +from .utils import ( + ValidationError, + validate_time_window, + validate_attendees, + parse_rfc3339, + success, + failure, + shape_meeting, + http_error_to_message, +) + +logger = logging.getLogger(__name__) + +# Per-request (or per-stdio session) token storage +auth_token_context: ContextVar[str] = ContextVar("auth_token", default="") + + +# -------- Token helpers -------- # +def extract_access_token(request_or_scope) -> str: + auth_data = os.getenv("AUTH_DATA") + if not auth_data and request_or_scope is not None: + try: + if hasattr(request_or_scope, 'headers'): + header_val = request_or_scope.headers.get(b'x-auth-data') or request_or_scope.headers.get('x-auth-data') + if header_val: + if isinstance(header_val, bytes): + header_val = header_val.decode('utf-8') + auth_data = base64.b64decode(header_val).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + headers = dict(request_or_scope.get('headers', [])) + header_val = headers.get(b'x-auth-data') or headers.get('x-auth-data') + if header_val: + if isinstance(header_val, bytes): + header_val = header_val.decode('utf-8') + auth_data = base64.b64decode(header_val).decode('utf-8') + except Exception as e: + logger.debug(f"Failed to pull x-auth-data header: {e}") + + if not auth_data: + return "" + try: + auth_json = json.loads(auth_data) + return auth_json.get("access_token", "") or "" + except Exception as e: + logger.warning(f"Failed to parse AUTH_DATA JSON: {e}") + return "" + + +def get_auth_token() -> str: + try: + return auth_token_context.get() + except LookupError: + return "" + + +def _calendar_service(access_token: str): + credentials = Credentials(token=access_token) + return build('calendar', 'v3', credentials=credentials) + + +# -------- Tool implementations -------- # +async def create_meet(summary: str, start_time: str, end_time: str, attendees: List[str], description: str = "", notify_attendees: bool = True) -> Dict[str, Any]: + logger.info(f"tool=create_meet action=start summary='{summary}'") + try: + if not summary or not start_time or not end_time or attendees is None: + return failure("Missing required fields", details={"required": ["summary", "start_time", "end_time", "attendees"]}) + validate_time_window(start_time, end_time) + validate_attendees(attendees) + token = get_auth_token() + if not token: + return failure("Missing access token", code="unauthorized") + service = _calendar_service(token) + event = { + 'summary': summary, + 'description': description or "", + 'start': {'dateTime': start_time, 'timeZone': 'UTC'}, + 'end': {'dateTime': end_time, 'timeZone': 'UTC'}, + 'attendees': [{'email': email} for email in attendees], + 'conferenceData': { + 'createRequest': { + 'requestId': os.urandom(8).hex(), + 'conferenceSolutionKey': {'type': 'hangoutsMeet'} + } + } + } + send_updates = 'all' if notify_attendees and attendees else 'none' + created = service.events().insert( + calendarId='primary', + body=event, + conferenceDataVersion=1, + sendUpdates=send_updates, + ).execute() + data = shape_meeting(created) + data['invitations_sent'] = bool(attendees) and notify_attendees + logger.info(f"tool=create_meet action=success event_id={data.get('event_id')}") + return success(data) + except ValidationError as ve: + logger.warning(f"tool=create_meet validation_error={ve}") + return failure(str(ve)) + except HttpError as e: + status = getattr(e.resp, 'status', 0) + detail = http_error_to_message(status, "Google Calendar API error") + try: + error_detail = json.loads(e.content.decode('utf-8')) + except Exception: + error_detail = {} + logger.error(f"tool=create_meet http_error status={status} msg={detail}") + return failure(detail, code=str(status or 'http_error'), details=error_detail) + except Exception as e: + logger.exception(f"tool=create_meet unexpected_error={e}") + return failure("Unexpected server error", code="internal_error") + + +async def list_meetings(max_results: int = 10, start_after: str | None = None, end_before: str | None = None) -> Dict[str, Any]: + logger.info(f"tool=list_meetings action=start max_results={max_results}") + try: + if max_results <= 0 or max_results > 100: + return failure("max_results must be between 1 and 100") + token = get_auth_token() + if not token: + return failure("Missing access token", code="unauthorized") + service = _calendar_service(token) + now = datetime.datetime.utcnow().replace(microsecond=0).isoformat() + 'Z' + time_min = start_after or now + if start_after: + parse_rfc3339(start_after) + if end_before: + parse_rfc3339(end_before) + events_result = service.events().list( + calendarId='primary', + timeMin=time_min, + timeMax=end_before, + maxResults=max_results, + singleEvents=True, + orderBy='startTime', + ).execute() + events = events_result.get('items', []) + meet_events = [] + for event in events: + hangout_link = event.get('hangoutLink') or "" + conference_data = event.get('conferenceData', {}) or {} + is_meet = False + if hangout_link and 'meet.google.com' in hangout_link: + is_meet = True + else: + for ep in conference_data.get('entryPoints', []) or []: + if ep.get('entryPointType') == 'video' and 'meet.google.com' in (ep.get('uri') or ''): + is_meet = True + break + if is_meet: + meet_events.append(event) + meetings = [shape_meeting(e) for e in meet_events] + logger.info(f"tool=list_meetings action=success count={len(meetings)}") + return success({"meetings": meetings, "total_count": len(meetings)}) + except ValidationError as ve: + logger.warning(f"tool=list_meetings validation_error={ve}") + return failure(str(ve)) + except HttpError as e: + status = getattr(e.resp, 'status', 0) + detail = http_error_to_message(status, "Google Calendar API error") + try: + error_detail = json.loads(e.content.decode('utf-8')) + except Exception: + error_detail = {} + logger.error(f"tool=list_meetings http_error status={status} msg={detail}") + return failure(detail, code=str(status or 'http_error'), details=error_detail) + except Exception as e: # pragma: no cover + logger.exception(f"tool=list_meetings unexpected_error={e}") + return failure("Unexpected server error", code="internal_error") + + +async def get_meeting_details(event_id: str) -> Dict[str, Any]: + logger.info(f"tool=get_meeting_details action=start event_id={event_id}") + try: + if not event_id: + return failure("event_id is required") + token = get_auth_token() + if not token: + return failure("Missing access token", code="unauthorized") + service = _calendar_service(token) + event = service.events().get(calendarId='primary', eventId=event_id).execute() + data = shape_meeting(event) + logger.info(f"tool=get_meeting_details action=success event_id={event_id}") + return success(data) + except HttpError as e: + status = getattr(e.resp, 'status', 0) + detail = http_error_to_message(status, "Google Calendar API error") + try: + error_detail = json.loads(e.content.decode('utf-8')) + except Exception: + error_detail = {} + logger.error(f"tool=get_meeting_details http_error status={status} event_id={event_id}") + return failure(detail, code=str(status or 'http_error'), details=error_detail) + except Exception as e: # pragma: no cover + logger.exception(f"tool=get_meeting_details unexpected_error={e}") + return failure("Unexpected server error", code="internal_error") + + +async def update_meeting(event_id: str, summary: str = None, start_time: str = None, end_time: str = None, + attendees: List[str] = None, description: str = None, notify_attendees: bool = True) -> Dict[str, Any]: + logger.info(f"tool=update_meeting action=start event_id={event_id}") + try: + if not event_id: + return failure("event_id is required") + if not any([summary, start_time, end_time, attendees, description]): + return failure("At least one field to update must be provided") + if start_time and end_time: + validate_time_window(start_time, end_time) + elif start_time or end_time: + if start_time: + parse_rfc3339(start_time) + if end_time: + parse_rfc3339(end_time) + if attendees is not None: + validate_attendees(attendees) + token = get_auth_token() + if not token: + return failure("Missing access token", code="unauthorized") + service = _calendar_service(token) + event = service.events().get(calendarId='primary', eventId=event_id).execute() + original = json.loads(json.dumps(event)) # shallow clone via serialize + if summary is not None: + event['summary'] = summary + if description is not None: + event['description'] = description + if start_time is not None: + event['start'] = {'dateTime': start_time, 'timeZone': 'UTC'} + if end_time is not None: + event['end'] = {'dateTime': end_time, 'timeZone': 'UTC'} + if attendees is not None: + event['attendees'] = [{'email': email} for email in attendees] + changed = False + for key in ['summary', 'description']: + if original.get(key) != event.get(key): + changed = True + break + if not changed and (original.get('start') or {}) != (event.get('start') or {}): + changed = True + if not changed and (original.get('end') or {}) != (event.get('end') or {}): + changed = True + if not changed and attendees is not None: + orig_emails = sorted([a.get('email') for a in original.get('attendees', []) or []]) + new_emails = sorted([a.get('email') for a in event.get('attendees', []) or []]) + if orig_emails != new_emails: + changed = True + if not changed: + data = shape_meeting(event) + data['invitations_sent'] = False + logger.info(f"tool=update_meeting action=noop event_id={event_id}") + return success(data) + send_updates = 'all' if notify_attendees else 'none' + updated_event = service.events().update( + calendarId='primary', + eventId=event_id, + body=event, + conferenceDataVersion=1, + sendUpdates=send_updates, + ).execute() + data = shape_meeting(updated_event) + data['invitations_sent'] = notify_attendees + logger.info(f"tool=update_meeting action=success event_id={event_id}") + return success(data) + except ValidationError as ve: + logger.warning(f"tool=update_meeting validation_error={ve}") + return failure(str(ve)) + except HttpError as e: + status = getattr(e.resp, 'status', 0) + detail = http_error_to_message(status, "Google Calendar API error") + try: + error_detail = json.loads(e.content.decode('utf-8')) + except Exception: + error_detail = {} + logger.error(f"tool=update_meeting http_error status={status} event_id={event_id}") + return failure(detail, code=str(status or 'http_error'), details=error_detail) + except Exception as e: # pragma: no cover + logger.exception(f"tool=update_meeting unexpected_error={e}") + return failure("Unexpected server error", code="internal_error") + + +async def delete_meeting(event_id: str) -> Dict[str, Any]: + logger.info(f"tool=delete_meeting action=start event_id={event_id}") + try: + if not event_id: + return failure("event_id is required") + token = get_auth_token() + if not token: + return failure("Missing access token", code="unauthorized") + service = _calendar_service(token) + service.events().delete(calendarId='primary', eventId=event_id).execute() + logger.info(f"tool=delete_meeting action=success event_id={event_id}") + return success({"deleted": True, "event_id": event_id}) + except HttpError as e: + status = getattr(e.resp, 'status', 0) + detail = http_error_to_message(status, "Google Calendar API error") + try: + error_detail = json.loads(e.content.decode('utf-8')) + except Exception: + error_detail = {} + logger.error(f"tool=delete_meeting http_error status={status} event_id={event_id}") + return failure(detail, code=str(status or 'http_error'), details=error_detail) + except Exception as e: # pragma: no cover + logger.exception(f"tool=delete_meeting unexpected_error={e}") + return failure("Unexpected server error", code="internal_error") + + +__all__ = [ + "auth_token_context", + "extract_access_token", + "get_auth_token", + "create_meet", + "list_meetings", + "list_past_meetings", + "get_meeting_details", + "update_meeting", + "delete_meeting", + "get_past_meeting_attendees", +] + +async def list_past_meetings(max_results: int = 10, since: str | None = None) -> Dict[str, Any]: + logger.info(f"tool=list_past_meetings action=start max_results={max_results} since={since}") + try: + if max_results <= 0 or max_results > 100: + return failure("max_results must be between 1 and 100") + if since: + parse_rfc3339(since) + token = get_auth_token() + if not token: + return failure("Missing access token", code="unauthorized") + service = _calendar_service(token) + + now_dt = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc) + if since: + time_min_str = since + lookback_days = None + else: + time_min_dt = now_dt - datetime.timedelta(days=30) + time_min_str = time_min_dt.isoformat().replace('+00:00', 'Z') + lookback_days = 30 + page_token = None + meet_events: list[dict[str, Any]] = [] + fetched_events = 0 + + def _to_dt(value: str) -> datetime.datetime | None: + if not value: + return None + try: + if value.endswith('Z'): + value = value.replace('Z', '+00:00') + dt = datetime.datetime.fromisoformat(value) + if dt.tzinfo is None: + dt = dt.replace(tzinfo=datetime.timezone.utc) + return dt.astimezone(datetime.timezone.utc) + except Exception: + return None + + while len(meet_events) < max_results and fetched_events < 3000: + query = { + 'calendarId': 'primary', + 'singleEvents': True, + 'orderBy': 'startTime', + 'timeMin': time_min_str, + 'timeMax': now_dt.isoformat().replace('+00:00', 'Z'), + 'maxResults': 250, + } + if page_token: + query['pageToken'] = page_token + resp = service.events().list(**query).execute() + items = resp.get('items', []) + fetched_events += len(items) + + for ev in items: + if len(meet_events) >= max_results: + break + start_raw = (ev.get('start', {}) or {}).get('dateTime') or (ev.get('start', {}) or {}).get('date') + end_raw = (ev.get('end', {}) or {}).get('dateTime') or (ev.get('end', {}) or {}).get('date') + if not start_raw or not end_raw or len(start_raw) == 10 or len(end_raw) == 10: + continue + start_dt = _to_dt(start_raw) + end_dt = _to_dt(end_raw) + if not start_dt or not end_dt: + continue + if end_dt > now_dt: + continue + hangout_link = ev.get('hangoutLink') or '' + if 'meet.google.com' in hangout_link: + meet_events.append(ev) + continue + conf = ev.get('conferenceData', {}) or {} + for ep in conf.get('entryPoints', []) or []: + if ep.get('entryPointType') == 'video' and 'meet.google.com' in (ep.get('uri') or ''): + meet_events.append(ev) + break + page_token = resp.get('nextPageToken') + if not page_token: + break + + # Sort newest (latest start) first + meet_events.sort(key=lambda e: (e.get('start', {}) or {}).get('dateTime') or '', reverse=True) + shaped = [shape_meeting(e) for e in meet_events[:max_results]] + logger.info( + "tool=list_past_meetings action=success returned=%d fetched_events=%d pages_exhausted=%s", + len(shaped), fetched_events, page_token is None + ) + return success({ + "meetings": shaped, + "total_count": len(shaped), + "debug": { + "fetched_events": fetched_events, + "raw_collected": len(meet_events), + "timeMin": time_min_str, + "lookback_days": lookback_days, + }, + }) + except ValidationError as ve: + logger.warning(f"tool=list_past_meetings validation_error={ve}") + return failure(str(ve)) + except HttpError as e: + status = getattr(e.resp, 'status', 0) + detail = http_error_to_message(status, "Google Calendar API error") + try: + error_detail = json.loads(e.content.decode('utf-8')) + except Exception: + error_detail = {} + logger.error(f"tool=list_past_meetings http_error status={status} msg={detail}") + return failure(detail, code=str(status or 'http_error'), details=error_detail) + except Exception as e: # pragma: no cover + logger.exception(f"tool=list_past_meetings unexpected_error={e}") + return failure("Unexpected server error", code="internal_error") + + +async def get_past_meeting_attendees(event_id: str) -> Dict[str, Any]: + + logger.info(f"tool=get_past_meeting_attendees action=start event_id={event_id}") + try: + if not event_id: + return failure("event_id is required") + token = get_auth_token() + if not token: + return failure("Missing access token", code="unauthorized") + service = _calendar_service(token) + event = service.events().get(calendarId='primary', eventId=event_id).execute() + end_info = event.get('end', {}) or {} + end_dt = end_info.get('dateTime') or end_info.get('date') + if not end_dt: + return failure("Cannot determine meeting end time", code="no_end_time") + if len(end_dt) == 10: + return failure("Event is all-day or missing precise end time", code="not_supported") + now = datetime.datetime.utcnow().replace(microsecond=0).isoformat() + 'Z' + if end_dt > now: + return failure("Meeting has not ended yet", code="meeting_not_past") + attendees = event.get('attendees', []) or [] + shaped_attendees = [ + { + "email": a.get('email', ''), + "displayName": a.get('displayName', ''), + "responseStatus": a.get('responseStatus', ''), + "optional": a.get('optional', False), + } + for a in attendees + ] + logger.info(f"tool=get_past_meeting_attendees action=success event_id={event_id} count={len(shaped_attendees)}") + meet_url = next( + ( + ep.get('uri') + for ep in (event.get('conferenceData', {}) or {}).get('entryPoints', []) or [] + if ep.get('entryPointType') == 'video' and 'meet.google.com' in (ep.get('uri') or '') + ), + event.get('hangoutLink', '') or '' + ) + if 'meet.google.com' not in meet_url: + meet_url = '' + return success({ + "event_id": event_id, + "summary": event.get('summary', ''), + "ended": end_dt, + "attendees": shaped_attendees, + "meet_url": meet_url, + }) + except HttpError as e: + status = getattr(e.resp, 'status', 0) + detail = http_error_to_message(status, "Google Calendar API error") + try: + error_detail = json.loads(e.content.decode('utf-8')) + except Exception: + error_detail = {} + logger.error(f"tool=get_past_meeting_attendees http_error status={status} event_id={event_id}") + return failure(detail, code=str(status or 'http_error'), details=error_detail) + except Exception as e: # pragma: no cover + logger.exception(f"tool=get_past_meeting_attendees unexpected_error={e}") + return failure("Unexpected server error", code="internal_error") + \ No newline at end of file diff --git a/mcp_servers/google_meet/tools/meet_api.py b/mcp_servers/google_meet/tools/meet_api.py new file mode 100644 index 00000000..79d8f688 --- /dev/null +++ b/mcp_servers/google_meet/tools/meet_api.py @@ -0,0 +1,290 @@ +from __future__ import annotations + +import json +import logging +import os +from functools import lru_cache +from typing import Any, Dict, List, Optional +from urllib.parse import urlencode +from urllib.request import Request, urlopen +from urllib.error import HTTPError, URLError + +from google.oauth2.credentials import Credentials +from googleapiclient.discovery import build +from googleapiclient.errors import HttpError + +from .utils import success, failure +from .base import get_auth_token + +logger = logging.getLogger(__name__) + +MEET_API_DISABLED_ENV = os.getenv("MEET_API_DISABLED_ENV", "MEET_API_V2_DISABLED") +MEET_API_BASE = os.getenv("MEET_API_BASE", "https://meet.googleapis.com/v2") + + +def _meet_service(access_token: str): + creds = Credentials(token=access_token) + return build("meet", "v2", credentials=creds, cache_discovery=False) + + +def _http_get_json(path: str, params: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + token = get_auth_token() + if not token: + raise RuntimeError("Missing access token") + url = f"{MEET_API_BASE}{path}" + if params: + url = f"{url}?{urlencode(params)}" + req = Request(url, headers={ + "Authorization": f"Bearer {token}", + "Accept": "application/json", + }) + with urlopen(req, timeout=15) as resp: + data = resp.read().decode("utf-8") + return json.loads(data) if data else {} + + +@lru_cache(maxsize=1) +def _capability_probe() -> Dict[str, Any]: + if os.getenv(MEET_API_DISABLED_ENV, "").lower() in {"1", "true", "yes"}: + return {"enabled": False, "code": "disabled_env", "error": "Meet API v2 disabled by env"} + token = get_auth_token() + if not token: + return {"enabled": False, "code": "unauthorized", "error": "Missing access token"} + try: + svc = _meet_service(token) + spaces_res = svc.spaces() + has_list = hasattr(spaces_res, "list") + has_create = hasattr(spaces_res, "create") + if has_list: + try: + resp = spaces_res.list(pageSize=1).execute() + logger.debug("meet_v2 probe success keys=%s", list(resp.keys())) + return {"enabled": True, "mode": "list_probe"} + except AttributeError: + has_list = False + if not has_list and has_create: + logger.debug("meet_v2 probe falling back to create capability assumption") + return {"enabled": True, "mode": "assumed_via_create"} + if not (has_list or has_create): + return {"enabled": False, "code": "unsupported_client", "error": "Meet API v2 methods not present in discovery"} + return {"enabled": True, "mode": "unknown_fallback"} + except HttpError as e: + status = getattr(e.resp, "status", 0) + try: + detail = json.loads(e.content.decode("utf-8")) + except Exception: + detail = {} + logger.info("meet_v2 probe http_error status=%s detail=%s", status, detail) + code = "meet_api_unavailable" + if status == 403: + code = "forbidden_or_consumer" + elif status == 404: + code = "not_enabled" + return {"enabled": False, "code": code, "error": "Meet API v2 not available"} + except Exception as e: + logger.warning("meet_v2 probe unexpected=%s", e) + return {"enabled": False, "code": "probe_error", "error": str(e)} + + +def _ensure_enabled() -> Optional[Dict[str, Any]]: + probe = _capability_probe() + if not probe.get("enabled"): + return failure( + probe.get("error", "Meet API v2 unavailable"), + code=probe.get("code", "meet_api_unavailable"), + details={k: v for k, v in probe.items() if k != "enabled" and v is not None}, + ) + return None + + +def _shape_space(space: Dict[str, Any]) -> Dict[str, Any]: + return { + "space_id": space.get("name") or space.get("id"), + "display_name": space.get("displayName") or space.get("topic"), + "meeting_url": space.get("meetingUri") or space.get("meetingUrl"), + "create_time": space.get("createTime"), + "end_time": space.get("endTime"), + "state": space.get("state"), + "raw": space, + } + + +async def create_instant_meeting() -> Dict[str, Any]: + logger.info("tool=google_meet_v2_create_instant action=start") + fail = _ensure_enabled() + if fail: + return fail + token = get_auth_token() + try: + svc = _meet_service(token) + space = svc.spaces().create(body={}).execute() + shaped = _shape_space(space) + logger.info("tool=google_meet_v2_create_instant action=success space_id=%s", shaped.get("space_id")) + return success(shaped) + except HttpError as e: + status = getattr(e.resp, "status", 0) + try: + detail = json.loads(e.content.decode("utf-8")) + except Exception: + detail = {} + logger.error("tool=google_meet_v2_create_instant http_error status=%s", status) + return failure("Meet API error", code=str(status or "http_error"), details=detail) + except Exception as e: + logger.exception("tool=google_meet_v2_create_instant unexpected_error=%s", e) + return failure("Unexpected server error", code="internal_error") + + +async def get_meeting(space_id: str) -> Dict[str, Any]: + logger.info("tool=google_meet_v2_get_meeting action=start space_id=%s", space_id) + if not space_id: + return failure("space_id is required") + fail = _ensure_enabled() + if fail: + return fail + token = get_auth_token() + try: + svc = _meet_service(token) + space = svc.spaces().get(name=space_id if space_id.startswith("spaces/") else f"spaces/{space_id}").execute() + return success(_shape_space(space)) + except HttpError as e: + status = getattr(e.resp, "status", 0) + try: + detail = json.loads(e.content.decode("utf-8")) + except Exception: + detail = {} + return failure("Failed to fetch meeting", code=str(status or "http_error"), details=detail) + except Exception as e: + logger.exception("tool=google_meet_v2_get_meeting unexpected_error=%s", e) + return failure("Unexpected server error", code="internal_error") + + +# NOTE: +# Leaving implementation for future enablement when API/scopes are broadly available. +async def list_meetings(max_results: int = 10) -> Dict[str, Any]: + logger.info("tool=google_meet_v2_list_meetings action=start max_results=%s", max_results) + if max_results <= 0 or max_results > 100: + return failure("max_results must be between 1 and 100") + fail = _ensure_enabled() + if fail: + return fail + token = get_auth_token() + try: + svc = _meet_service(token) + spaces_res = svc.spaces() + use_http_fallback = not hasattr(spaces_res, "list") + page_token = None + spaces: List[Dict[str, Any]] = [] + while len(spaces) < max_results: + page_size = min(50, max_results - len(spaces)) + if not use_http_fallback: + resp = spaces_res.list(pageSize=page_size, pageToken=page_token).execute() if page_token else spaces_res.list(pageSize=page_size).execute() + else: + # Fallback to raw HTTP if discovery lacks list + params = {"pageSize": page_size} + if page_token: + params["pageToken"] = page_token + try: + resp = _http_get_json("/spaces", params) + except HTTPError as e: + try: + detail = json.loads(e.read().decode("utf-8")) + except Exception: + detail = {"body": e.read().decode("utf-8", "ignore") if hasattr(e, 'read') else None} + return failure("Failed to list meetings", code=str(e.code or "http_error"), details=detail) + batch = resp.get("spaces", []) or resp.get("items", []) + for sp in batch: + if len(spaces) >= max_results: + break + spaces.append(sp) + page_token = resp.get("nextPageToken") + if not page_token: + break + shaped = [_shape_space(sp) for sp in spaces] + return success({"meetings": shaped, "total_count": len(shaped)}) + except HttpError as e: + status = getattr(e.resp, "status", 0) + try: + detail = json.loads(e.content.decode("utf-8")) + except Exception: + detail = {} + return failure("Failed to list meetings", code=str(status or "http_error"), details=detail) + except Exception as e: + logger.exception("tool=google_meet_v2_list_meetings unexpected_error=%s", e) + return failure("Unexpected server error", code="internal_error") + + +# NOTE: +# Leaving implementation for future enablement when API/scopes are broadly available. +async def get_participants(space_id: str, max_results: int = 50) -> Dict[str, Any]: + logger.info("tool=google_meet_v2_get_participants action=start space_id=%s", space_id) + if not space_id: + return failure("space_id is required") + if max_results <= 0 or max_results > 300: + return failure("max_results must be between 1 and 300") + fail = _ensure_enabled() + if fail: + return fail + token = get_auth_token() + try: + svc = _meet_service(token) + parent = space_id if space_id.startswith("spaces/") else f"spaces/{space_id}" + spaces_res = svc.spaces() + has_nested_participants = hasattr(spaces_res, "participants") + participants: List[Dict[str, Any]] = [] + page_token = None + while len(participants) < max_results: + page_size = min(50, max_results - len(participants)) + if has_nested_participants: + kwargs = {"parent": parent, "pageSize": page_size} + if page_token: + kwargs["pageToken"] = page_token + resp = spaces_res.participants().list(**kwargs).execute() + else: + # Fallback to raw HTTP: GET /v2/{parent}/participants + params = {"pageSize": page_size} + if page_token: + params["pageToken"] = page_token + try: + resp = _http_get_json(f"/{parent}/participants", params) + except HTTPError as e: + try: + detail = json.loads(e.read().decode("utf-8")) + except Exception: + detail = {"body": e.read().decode("utf-8", "ignore") if hasattr(e, 'read') else None} + return failure("Failed to get participants", code=str(e.code or "http_error"), details=detail) + batch = resp.get("participants", []) or resp.get("items", []) + for p in batch: + if len(participants) >= max_results: + break + participants.append(p) + page_token = resp.get("nextPageToken") + if not page_token: + break + shaped = [ + { + "user": (p.get("user") or {}).get("name") or p.get("name"), + "role": p.get("role"), + "state": p.get("state"), + "raw": p, + } + for p in participants + ] + return success({"participants": shaped, "total_count": len(shaped)}) + except HttpError as e: + status = getattr(e.resp, "status", 0) + try: + detail = json.loads(e.content.decode("utf-8")) + except Exception: + detail = {} + return failure("Failed to get participants", code=str(status or "http_error"), details=detail) + except Exception as e: + logger.exception("tool=google_meet_v2_get_participants unexpected_error=%s", e) + return failure("Unexpected server error", code="internal_error") + + +__all__ = [ + "create_instant_meeting", + "get_meeting", + "list_meetings", + "get_participants", +] diff --git a/mcp_servers/google_meet/tools/utils.py b/mcp_servers/google_meet/tools/utils.py new file mode 100644 index 00000000..68b39637 --- /dev/null +++ b/mcp_servers/google_meet/tools/utils.py @@ -0,0 +1,107 @@ +from __future__ import annotations + +import re +import datetime as _dt +from typing import Any, Dict, List, Optional + +RFC3339_REGEX = re.compile( + r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?Z$" +) + +EMAIL_REGEX = re.compile(r"^[^@\s]+@[^@\s]+\.[^@\s]+$") + + +class ValidationError(ValueError): + """Raised when tool input validation fails.""" + + +def is_rfc3339(value: str) -> bool: + return bool(RFC3339_REGEX.match(value)) + + +def parse_rfc3339(value: str) -> _dt.datetime: + if not is_rfc3339(value): + raise ValidationError( + f"Value '{value}' must be RFC3339 UTC (e.g. 2025-01-01T10:00:00Z)" + ) + # Python can parse with strptime; handle optional fractional seconds + fmt_main = "%Y-%m-%dT%H:%M:%SZ" + if "." in value: + base, frac = value[:-1].split(".") + frac = (frac + "000000")[:6] + dt = _dt.datetime.strptime(base, "%Y-%m-%dT%H:%M:%S") + return dt.replace(microsecond=int(frac), tzinfo=_dt.timezone.utc) + return _dt.datetime.strptime(value, fmt_main).replace(tzinfo=_dt.timezone.utc) + + +def validate_time_window(start: str, end: str) -> None: + start_dt = parse_rfc3339(start) + end_dt = parse_rfc3339(end) + if end_dt <= start_dt: + raise ValidationError("end_time must be after start_time") + + +def validate_attendees(attendees: List[str]) -> None: + if len(attendees) > 100: + raise ValidationError("Too many attendees (max 100)") + for a in attendees: + if not EMAIL_REGEX.match(a): + raise ValidationError(f"Invalid attendee email: {a}") + + +def success(data: Dict[str, Any]) -> Dict[str, Any]: + return {"success": True, "data": data} + + +def failure(message: str, code: str = "validation_error", details: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + body: Dict[str, Any] = {"success": False, "error": {"code": code, "message": message}} + if details: + body["error"]["details"] = details + return body + + +def shape_meeting(event: Dict[str, Any]) -> Dict[str, Any]: + meet_url = "" + conference = event.get("conferenceData", {}) or {} + for ep in conference.get("entryPoints", []) or []: + if ep.get("entryPointType") == "video" and 'meet.google.com' in (ep.get("uri") or ''): + meet_url = ep.get("uri", "") + break + # Fallback to hangoutLink if no conferenceData video entry point found + if not meet_url: + hl = event.get("hangoutLink") or "" + if 'meet.google.com' in hl: + meet_url = hl + return { + "event_id": event.get("id"), + "summary": event.get("summary", ""), + "description": event.get("description", ""), + "start": event.get("start", {}), + "end": event.get("end", {}), + "meet_url": meet_url, + "attendees": [ + { + "email": att.get("email", ""), + "displayName": att.get("displayName", ""), + "responseStatus": att.get("responseStatus", "") + } + for att in event.get("attendees", []) or [] + ], + "status": event.get("status"), + "created": event.get("created"), + "updated": event.get("updated"), + } + + +def http_error_to_message(status: int, default: str) -> str: + mapping = { + 400: "Bad request to Google Calendar API", + 401: "Unauthorized – access token invalid or expired", + 403: "Forbidden – insufficient permissions or missing scopes", + 404: "Event not found", + 409: "Conflict – concurrency issue", + 429: "Rate limit exceeded – retry later", + 500: "Google internal error – retry later", + 503: "Service unavailable – retry later", + } + return mapping.get(status, default) diff --git a/mcp_servers/google_meet/uv.lock b/mcp_servers/google_meet/uv.lock new file mode 100644 index 00000000..b457e4f7 --- /dev/null +++ b/mcp_servers/google_meet/uv.lock @@ -0,0 +1,1186 @@ +version = 1 +revision = 2 +requires-python = ">=3.10" +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version < '3.13'", +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "cachetools" +version = "5.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/81/3747dad6b14fa2cf53fcf10548cf5aea6913e96fab41a3c198676f8948a5/cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4", size = 28380, upload-time = "2025-02-20T21:01:19.524Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/72/76/20fa66124dbe6be5cafeb312ece67de6b61dd91a0247d1ea13db4ebb33c2/cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a", size = 10080, upload-time = "2025-02-20T21:01:16.647Z" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" }, + { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" }, + { url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" }, + { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" }, + { url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" }, + { url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" }, + { url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" }, + { url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" }, + { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" }, + { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" }, + { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, + { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, + { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, + { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, + { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, + { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, + { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, + { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, + { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, + { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, + { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, + { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, + { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, + { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, + { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, + { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, + { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, + { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, + { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, + { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, + { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, + { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, + { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, + { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, + { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, + { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, +] + +[[package]] +name = "click" +version = "8.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "fastapi" +version = "0.116.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" }, +] + +[[package]] +name = "google-api-core" +version = "2.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "googleapis-common-protos" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/21/e9d043e88222317afdbdb567165fdbc3b0aad90064c7e0c9eb0ad9955ad8/google_api_core-2.25.1.tar.gz", hash = "sha256:d2aaa0b13c78c61cb3f4282c464c046e45fbd75755683c9c525e6e8f7ed0a5e8", size = 165443, upload-time = "2025-06-12T20:52:20.439Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/4b/ead00905132820b623732b175d66354e9d3e69fcf2a5dcdab780664e7896/google_api_core-2.25.1-py3-none-any.whl", hash = "sha256:8a2a56c1fef82987a524371f99f3bd0143702fecc670c72e600c1cda6bf8dbb7", size = 160807, upload-time = "2025-06-12T20:52:19.334Z" }, +] + +[[package]] +name = "google-api-python-client" +version = "2.181.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-auth-httplib2" }, + { name = "httplib2" }, + { name = "uritemplate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/96/5561a5d7e37781c880ca90975a70d61940ec1648b2b12e991311a9e39f83/google_api_python_client-2.181.0.tar.gz", hash = "sha256:d7060962a274a16a2c6f8fb4b1569324dbff11bfbca8eb050b88ead1dd32261c", size = 13545438, upload-time = "2025-09-02T15:41:33.852Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/03/72b7acf374a2cde9255df161686f00d8370117ac33e2bdd8fdadfe30272a/google_api_python_client-2.181.0-py3-none-any.whl", hash = "sha256:348730e3ece46434a01415f3d516d7a0885c8e624ce799f50f2d4d86c2475fb7", size = 14111793, upload-time = "2025-09-02T15:41:31.322Z" }, +] + +[[package]] +name = "google-auth" +version = "2.40.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "pyasn1-modules" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/9b/e92ef23b84fa10a64ce4831390b7a4c2e53c0132568d99d4ae61d04c8855/google_auth-2.40.3.tar.gz", hash = "sha256:500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77", size = 281029, upload-time = "2025-06-04T18:04:57.577Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/63/b19553b658a1692443c62bd07e5868adaa0ad746a0751ba62c59568cd45b/google_auth-2.40.3-py2.py3-none-any.whl", hash = "sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca", size = 216137, upload-time = "2025-06-04T18:04:55.573Z" }, +] + +[[package]] +name = "google-auth-httplib2" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "httplib2" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/56/be/217a598a818567b28e859ff087f347475c807a5649296fb5a817c58dacef/google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05", size = 10842, upload-time = "2023-12-12T17:40:30.722Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/8a/fe34d2f3f9470a27b01c9e76226965863f153d5fbe276f83608562e49c04/google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d", size = 9253, upload-time = "2023-12-12T17:40:13.055Z" }, +] + +[[package]] +name = "google-auth-oauthlib" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "requests-oauthlib" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/87/e10bf24f7bcffc1421b84d6f9c3377c30ec305d082cd737ddaa6d8f77f7c/google_auth_oauthlib-1.2.2.tar.gz", hash = "sha256:11046fb8d3348b296302dd939ace8af0a724042e8029c1b872d87fabc9f41684", size = 20955, upload-time = "2025-04-22T16:40:29.172Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/84/40ee070be95771acd2f4418981edb834979424565c3eec3cd88b6aa09d24/google_auth_oauthlib-1.2.2-py3-none-any.whl", hash = "sha256:fd619506f4b3908b5df17b65f39ca8d66ea56986e5472eb5978fd8f3786f00a2", size = 19072, upload-time = "2025-04-22T16:40:28.174Z" }, +] + +[[package]] +name = "google-meet-mcp-server" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "click" }, + { name = "fastapi" }, + { name = "google-api-python-client" }, + { name = "google-auth" }, + { name = "google-auth-httplib2" }, + { name = "google-auth-oauthlib" }, + { name = "mcp" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[package.metadata] +requires-dist = [ + { name = "click" }, + { name = "fastapi" }, + { name = "google-api-python-client" }, + { name = "google-auth" }, + { name = "google-auth-httplib2" }, + { name = "google-auth-oauthlib" }, + { name = "mcp", specifier = ">=1.12.0" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "uvicorn", extras = ["standard"] }, +] + +[package.metadata.requires-dev] +dev = [] + +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/24/33db22342cf4a2ea27c9955e6713140fedd51e8b141b5ce5260897020f1a/googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257", size = 145903, upload-time = "2025-04-14T10:17:02.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/f1/62a193f0227cf15a920390abe675f386dec35f7ae3ffe6da582d3ade42c7/googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8", size = 294530, upload-time = "2025-04-14T10:17:01.271Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httplib2" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyparsing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/75/1d10a90b3411f707c10c226fa918cf4f5e0578113caa223369130f702b6b/httplib2-0.30.0.tar.gz", hash = "sha256:d5b23c11fcf8e57e00ff91b7008656af0f6242c8886fd97065c97509e4e548c5", size = 249764, upload-time = "2025-08-29T18:58:36.497Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/7c/f35bd530a35654ef3ff81f5e102572b8b620361659e090beb85a73a3bcc9/httplib2-0.30.0-py3-none-any.whl", hash = "sha256:d10443a2bdfe0ea5dbb17e016726146d48b574208dafd41e854cf34e7d78842c", size = 91101, upload-time = "2025-08-29T18:58:33.224Z" }, +] + +[[package]] +name = "httptools" +version = "0.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/6f/972f8eb0ea7d98a1c6be436e2142d51ad2a64ee18e02b0e7ff1f62171ab1/httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0", size = 198780, upload-time = "2024-10-16T19:44:06.882Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b0/17c672b4bc5c7ba7f201eada4e96c71d0a59fbc185e60e42580093a86f21/httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da", size = 103297, upload-time = "2024-10-16T19:44:08.129Z" }, + { url = "https://files.pythonhosted.org/packages/92/5e/b4a826fe91971a0b68e8c2bd4e7db3e7519882f5a8ccdb1194be2b3ab98f/httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1", size = 443130, upload-time = "2024-10-16T19:44:09.45Z" }, + { url = "https://files.pythonhosted.org/packages/b0/51/ce61e531e40289a681a463e1258fa1e05e0be54540e40d91d065a264cd8f/httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50", size = 442148, upload-time = "2024-10-16T19:44:11.539Z" }, + { url = "https://files.pythonhosted.org/packages/ea/9e/270b7d767849b0c96f275c695d27ca76c30671f8eb8cc1bab6ced5c5e1d0/httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959", size = 415949, upload-time = "2024-10-16T19:44:13.388Z" }, + { url = "https://files.pythonhosted.org/packages/81/86/ced96e3179c48c6f656354e106934e65c8963d48b69be78f355797f0e1b3/httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4", size = 417591, upload-time = "2024-10-16T19:44:15.258Z" }, + { url = "https://files.pythonhosted.org/packages/75/73/187a3f620ed3175364ddb56847d7a608a6fc42d551e133197098c0143eca/httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c", size = 88344, upload-time = "2024-10-16T19:44:16.54Z" }, + { url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069", size = 199029, upload-time = "2024-10-16T19:44:18.427Z" }, + { url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a", size = 103492, upload-time = "2024-10-16T19:44:19.515Z" }, + { url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975", size = 462891, upload-time = "2024-10-16T19:44:21.067Z" }, + { url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636", size = 459788, upload-time = "2024-10-16T19:44:22.958Z" }, + { url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721", size = 433214, upload-time = "2024-10-16T19:44:24.513Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988", size = 434120, upload-time = "2024-10-16T19:44:26.295Z" }, + { url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17", size = 88565, upload-time = "2024-10-16T19:44:29.188Z" }, + { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload-time = "2024-10-16T19:44:30.175Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload-time = "2024-10-16T19:44:31.786Z" }, + { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload-time = "2024-10-16T19:44:32.825Z" }, + { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837, upload-time = "2024-10-16T19:44:33.974Z" }, + { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289, upload-time = "2024-10-16T19:44:35.111Z" }, + { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779, upload-time = "2024-10-16T19:44:36.253Z" }, + { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634, upload-time = "2024-10-16T19:44:37.357Z" }, + { url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214, upload-time = "2024-10-16T19:44:38.738Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431, upload-time = "2024-10-16T19:44:39.818Z" }, + { url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121, upload-time = "2024-10-16T19:44:41.189Z" }, + { url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805, upload-time = "2024-10-16T19:44:42.384Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858, upload-time = "2024-10-16T19:44:43.959Z" }, + { url = "https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042, upload-time = "2024-10-16T19:44:45.071Z" }, + { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682, upload-time = "2024-10-16T19:44:46.46Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513, upload-time = "2025-04-23T12:34:07.418Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, +] + +[[package]] +name = "mcp" +version = "1.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/3c/82c400c2d50afdac4fbefb5b4031fd327e2ad1f23ccef8eee13c5909aa48/mcp-1.13.1.tar.gz", hash = "sha256:165306a8fd7991dc80334edd2de07798175a56461043b7ae907b279794a834c5", size = 438198, upload-time = "2025-08-22T09:22:16.061Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/19/3f/d085c7f49ade6d273b185d61ec9405e672b6433f710ea64a90135a8dd445/mcp-1.13.1-py3-none-any.whl", hash = "sha256:c314e7c8bd477a23ba3ef472ee5a32880316c42d03e06dcfa31a1cc7a73b65df", size = 161494, upload-time = "2025-08-22T09:22:14.705Z" }, +] + +[[package]] +name = "oauthlib" +version = "3.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/5f/19930f824ffeb0ad4372da4812c50edbd1434f678c90c2733e1188edfc63/oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9", size = 185918, upload-time = "2025-06-19T22:48:08.269Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065, upload-time = "2025-06-19T22:48:06.508Z" }, +] + +[[package]] +name = "proto-plus" +version = "1.26.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142, upload-time = "2025-03-10T15:54:38.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163, upload-time = "2025-03-10T15:54:37.335Z" }, +] + +[[package]] +name = "protobuf" +version = "6.32.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/df/fb4a8eeea482eca989b51cffd274aac2ee24e825f0bf3cbce5281fa1567b/protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2", size = 440614, upload-time = "2025-08-14T21:21:25.015Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/18/df8c87da2e47f4f1dcc5153a81cd6bca4e429803f4069a299e236e4dd510/protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741", size = 424409, upload-time = "2025-08-14T21:21:12.366Z" }, + { url = "https://files.pythonhosted.org/packages/e1/59/0a820b7310f8139bd8d5a9388e6a38e1786d179d6f33998448609296c229/protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e", size = 435735, upload-time = "2025-08-14T21:21:15.046Z" }, + { url = "https://files.pythonhosted.org/packages/cc/5b/0d421533c59c789e9c9894683efac582c06246bf24bb26b753b149bd88e4/protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0", size = 426449, upload-time = "2025-08-14T21:21:16.687Z" }, + { url = "https://files.pythonhosted.org/packages/ec/7b/607764ebe6c7a23dcee06e054fd1de3d5841b7648a90fd6def9a3bb58c5e/protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1", size = 322869, upload-time = "2025-08-14T21:21:18.282Z" }, + { url = "https://files.pythonhosted.org/packages/40/01/2e730bd1c25392fc32e3268e02446f0d77cb51a2c3a8486b1798e34d5805/protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c", size = 322009, upload-time = "2025-08-14T21:21:19.893Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f2/80ffc4677aac1bc3519b26bc7f7f5de7fce0ee2f7e36e59e27d8beb32dd1/protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783", size = 169287, upload-time = "2025-08-14T21:21:23.515Z" }, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, +] + +[[package]] +name = "pydantic" +version = "2.11.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, + { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, + { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, + { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, + { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, + { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, + { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, + { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, + { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, + { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, + { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, + { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, + { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, + { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, + { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, + { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, + { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, + { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, + { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, + { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, + { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, + { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, + { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, + { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, + { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, + { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, + { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", size = 172583, upload-time = "2025-06-24T13:26:46.841Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235, upload-time = "2025-06-24T13:26:45.485Z" }, +] + +[[package]] +name = "pyparsing" +version = "3.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432, upload-time = "2025-07-14T20:13:05.9Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103, upload-time = "2025-07-14T20:13:07.698Z" }, + { url = "https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557, upload-time = "2025-07-14T20:13:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +] + +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "oauthlib" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650, upload-time = "2024-03-22T20:32:29.939Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179, upload-time = "2024-03-22T20:32:28.055Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.27.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479, upload-time = "2025-08-27T12:16:36.024Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/ed/3aef893e2dd30e77e35d20d4ddb45ca459db59cead748cad9796ad479411/rpds_py-0.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:68afeec26d42ab3b47e541b272166a0b4400313946871cba3ed3a4fc0cab1cef", size = 371606, upload-time = "2025-08-27T12:12:25.189Z" }, + { url = "https://files.pythonhosted.org/packages/6d/82/9818b443e5d3eb4c83c3994561387f116aae9833b35c484474769c4a8faf/rpds_py-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74e5b2f7bb6fa38b1b10546d27acbacf2a022a8b5543efb06cfebc72a59c85be", size = 353452, upload-time = "2025-08-27T12:12:27.433Z" }, + { url = "https://files.pythonhosted.org/packages/99/c7/d2a110ffaaa397fc6793a83c7bd3545d9ab22658b7cdff05a24a4535cc45/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9024de74731df54546fab0bfbcdb49fae19159ecaecfc8f37c18d2c7e2c0bd61", size = 381519, upload-time = "2025-08-27T12:12:28.719Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bc/e89581d1f9d1be7d0247eaef602566869fdc0d084008ba139e27e775366c/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31d3ebadefcd73b73928ed0b2fd696f7fefda8629229f81929ac9c1854d0cffb", size = 394424, upload-time = "2025-08-27T12:12:30.207Z" }, + { url = "https://files.pythonhosted.org/packages/ac/2e/36a6861f797530e74bb6ed53495f8741f1ef95939eed01d761e73d559067/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2e7f8f169d775dd9092a1743768d771f1d1300453ddfe6325ae3ab5332b4657", size = 523467, upload-time = "2025-08-27T12:12:31.808Z" }, + { url = "https://files.pythonhosted.org/packages/c4/59/c1bc2be32564fa499f988f0a5c6505c2f4746ef96e58e4d7de5cf923d77e/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d905d16f77eb6ab2e324e09bfa277b4c8e5e6b8a78a3e7ff8f3cdf773b4c013", size = 402660, upload-time = "2025-08-27T12:12:33.444Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ec/ef8bf895f0628dd0a59e54d81caed6891663cb9c54a0f4bb7da918cb88cf/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50c946f048209e6362e22576baea09193809f87687a95a8db24e5fbdb307b93a", size = 384062, upload-time = "2025-08-27T12:12:34.857Z" }, + { url = "https://files.pythonhosted.org/packages/69/f7/f47ff154be8d9a5e691c083a920bba89cef88d5247c241c10b9898f595a1/rpds_py-0.27.1-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:3deab27804d65cd8289eb814c2c0e807c4b9d9916c9225e363cb0cf875eb67c1", size = 401289, upload-time = "2025-08-27T12:12:36.085Z" }, + { url = "https://files.pythonhosted.org/packages/3b/d9/ca410363efd0615814ae579f6829cafb39225cd63e5ea5ed1404cb345293/rpds_py-0.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b61097f7488de4be8244c89915da8ed212832ccf1e7c7753a25a394bf9b1f10", size = 417718, upload-time = "2025-08-27T12:12:37.401Z" }, + { url = "https://files.pythonhosted.org/packages/e3/a0/8cb5c2ff38340f221cc067cc093d1270e10658ba4e8d263df923daa18e86/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a3f29aba6e2d7d90528d3c792555a93497fe6538aa65eb675b44505be747808", size = 558333, upload-time = "2025-08-27T12:12:38.672Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8c/1b0de79177c5d5103843774ce12b84caa7164dfc6cd66378768d37db11bf/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd6cd0485b7d347304067153a6dc1d73f7d4fd995a396ef32a24d24b8ac63ac8", size = 589127, upload-time = "2025-08-27T12:12:41.48Z" }, + { url = "https://files.pythonhosted.org/packages/c8/5e/26abb098d5e01266b0f3a2488d299d19ccc26849735d9d2b95c39397e945/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f4461bf931108c9fa226ffb0e257c1b18dc2d44cd72b125bec50ee0ab1248a9", size = 554899, upload-time = "2025-08-27T12:12:42.925Z" }, + { url = "https://files.pythonhosted.org/packages/de/41/905cc90ced13550db017f8f20c6d8e8470066c5738ba480d7ba63e3d136b/rpds_py-0.27.1-cp310-cp310-win32.whl", hash = "sha256:ee5422d7fb21f6a00c1901bf6559c49fee13a5159d0288320737bbf6585bd3e4", size = 217450, upload-time = "2025-08-27T12:12:44.813Z" }, + { url = "https://files.pythonhosted.org/packages/75/3d/6bef47b0e253616ccdf67c283e25f2d16e18ccddd38f92af81d5a3420206/rpds_py-0.27.1-cp310-cp310-win_amd64.whl", hash = "sha256:3e039aabf6d5f83c745d5f9a0a381d031e9ed871967c0a5c38d201aca41f3ba1", size = 228447, upload-time = "2025-08-27T12:12:46.204Z" }, + { url = "https://files.pythonhosted.org/packages/b5/c1/7907329fbef97cbd49db6f7303893bd1dd5a4a3eae415839ffdfb0762cae/rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881", size = 371063, upload-time = "2025-08-27T12:12:47.856Z" }, + { url = "https://files.pythonhosted.org/packages/11/94/2aab4bc86228bcf7c48760990273653a4900de89c7537ffe1b0d6097ed39/rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5", size = 353210, upload-time = "2025-08-27T12:12:49.187Z" }, + { url = "https://files.pythonhosted.org/packages/3a/57/f5eb3ecf434342f4f1a46009530e93fd201a0b5b83379034ebdb1d7c1a58/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e", size = 381636, upload-time = "2025-08-27T12:12:50.492Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f4/ef95c5945e2ceb5119571b184dd5a1cc4b8541bbdf67461998cfeac9cb1e/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c", size = 394341, upload-time = "2025-08-27T12:12:52.024Z" }, + { url = "https://files.pythonhosted.org/packages/5a/7e/4bd610754bf492d398b61725eb9598ddd5eb86b07d7d9483dbcd810e20bc/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195", size = 523428, upload-time = "2025-08-27T12:12:53.779Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e5/059b9f65a8c9149361a8b75094864ab83b94718344db511fd6117936ed2a/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52", size = 402923, upload-time = "2025-08-27T12:12:55.15Z" }, + { url = "https://files.pythonhosted.org/packages/f5/48/64cabb7daced2968dd08e8a1b7988bf358d7bd5bcd5dc89a652f4668543c/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed", size = 384094, upload-time = "2025-08-27T12:12:57.194Z" }, + { url = "https://files.pythonhosted.org/packages/ae/e1/dc9094d6ff566bff87add8a510c89b9e158ad2ecd97ee26e677da29a9e1b/rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a", size = 401093, upload-time = "2025-08-27T12:12:58.985Z" }, + { url = "https://files.pythonhosted.org/packages/37/8e/ac8577e3ecdd5593e283d46907d7011618994e1d7ab992711ae0f78b9937/rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde", size = 417969, upload-time = "2025-08-27T12:13:00.367Z" }, + { url = "https://files.pythonhosted.org/packages/66/6d/87507430a8f74a93556fe55c6485ba9c259949a853ce407b1e23fea5ba31/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21", size = 558302, upload-time = "2025-08-27T12:13:01.737Z" }, + { url = "https://files.pythonhosted.org/packages/3a/bb/1db4781ce1dda3eecc735e3152659a27b90a02ca62bfeea17aee45cc0fbc/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9", size = 589259, upload-time = "2025-08-27T12:13:03.127Z" }, + { url = "https://files.pythonhosted.org/packages/7b/0e/ae1c8943d11a814d01b482e1f8da903f88047a962dff9bbdadf3bd6e6fd1/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948", size = 554983, upload-time = "2025-08-27T12:13:04.516Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d5/0b2a55415931db4f112bdab072443ff76131b5ac4f4dc98d10d2d357eb03/rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39", size = 217154, upload-time = "2025-08-27T12:13:06.278Z" }, + { url = "https://files.pythonhosted.org/packages/24/75/3b7ffe0d50dc86a6a964af0d1cc3a4a2cdf437cb7b099a4747bbb96d1819/rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15", size = 228627, upload-time = "2025-08-27T12:13:07.625Z" }, + { url = "https://files.pythonhosted.org/packages/8d/3f/4fd04c32abc02c710f09a72a30c9a55ea3cc154ef8099078fd50a0596f8e/rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746", size = 220998, upload-time = "2025-08-27T12:13:08.972Z" }, + { url = "https://files.pythonhosted.org/packages/bd/fe/38de28dee5df58b8198c743fe2bea0c785c6d40941b9950bac4cdb71a014/rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90", size = 361887, upload-time = "2025-08-27T12:13:10.233Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/4b6c7eedc7dd90986bf0fab6ea2a091ec11c01b15f8ba0a14d3f80450468/rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5", size = 345795, upload-time = "2025-08-27T12:13:11.65Z" }, + { url = "https://files.pythonhosted.org/packages/6f/0e/e650e1b81922847a09cca820237b0edee69416a01268b7754d506ade11ad/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e", size = 385121, upload-time = "2025-08-27T12:13:13.008Z" }, + { url = "https://files.pythonhosted.org/packages/1b/ea/b306067a712988e2bff00dcc7c8f31d26c29b6d5931b461aa4b60a013e33/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881", size = 398976, upload-time = "2025-08-27T12:13:14.368Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0a/26dc43c8840cb8fe239fe12dbc8d8de40f2365e838f3d395835dde72f0e5/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec", size = 525953, upload-time = "2025-08-27T12:13:15.774Z" }, + { url = "https://files.pythonhosted.org/packages/22/14/c85e8127b573aaf3a0cbd7fbb8c9c99e735a4a02180c84da2a463b766e9e/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb", size = 407915, upload-time = "2025-08-27T12:13:17.379Z" }, + { url = "https://files.pythonhosted.org/packages/ed/7b/8f4fee9ba1fb5ec856eb22d725a4efa3deb47f769597c809e03578b0f9d9/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5", size = 386883, upload-time = "2025-08-27T12:13:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/86/47/28fa6d60f8b74fcdceba81b272f8d9836ac0340570f68f5df6b41838547b/rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a", size = 405699, upload-time = "2025-08-27T12:13:20.089Z" }, + { url = "https://files.pythonhosted.org/packages/d0/fd/c5987b5e054548df56953a21fe2ebed51fc1ec7c8f24fd41c067b68c4a0a/rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444", size = 423713, upload-time = "2025-08-27T12:13:21.436Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ba/3c4978b54a73ed19a7d74531be37a8bcc542d917c770e14d372b8daea186/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a", size = 562324, upload-time = "2025-08-27T12:13:22.789Z" }, + { url = "https://files.pythonhosted.org/packages/b5/6c/6943a91768fec16db09a42b08644b960cff540c66aab89b74be6d4a144ba/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1", size = 593646, upload-time = "2025-08-27T12:13:24.122Z" }, + { url = "https://files.pythonhosted.org/packages/11/73/9d7a8f4be5f4396f011a6bb7a19fe26303a0dac9064462f5651ced2f572f/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998", size = 558137, upload-time = "2025-08-27T12:13:25.557Z" }, + { url = "https://files.pythonhosted.org/packages/6e/96/6772cbfa0e2485bcceef8071de7821f81aeac8bb45fbfd5542a3e8108165/rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39", size = 221343, upload-time = "2025-08-27T12:13:26.967Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/c82f0faa9af1c6a64669f73a17ee0eeef25aff30bb9a1c318509efe45d84/rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594", size = 232497, upload-time = "2025-08-27T12:13:28.326Z" }, + { url = "https://files.pythonhosted.org/packages/e1/96/2817b44bd2ed11aebacc9251da03689d56109b9aba5e311297b6902136e2/rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502", size = 222790, upload-time = "2025-08-27T12:13:29.71Z" }, + { url = "https://files.pythonhosted.org/packages/cc/77/610aeee8d41e39080c7e14afa5387138e3c9fa9756ab893d09d99e7d8e98/rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b", size = 361741, upload-time = "2025-08-27T12:13:31.039Z" }, + { url = "https://files.pythonhosted.org/packages/3a/fc/c43765f201c6a1c60be2043cbdb664013def52460a4c7adace89d6682bf4/rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf", size = 345574, upload-time = "2025-08-27T12:13:32.902Z" }, + { url = "https://files.pythonhosted.org/packages/20/42/ee2b2ca114294cd9847d0ef9c26d2b0851b2e7e00bf14cc4c0b581df0fc3/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83", size = 385051, upload-time = "2025-08-27T12:13:34.228Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e8/1e430fe311e4799e02e2d1af7c765f024e95e17d651612425b226705f910/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf", size = 398395, upload-time = "2025-08-27T12:13:36.132Z" }, + { url = "https://files.pythonhosted.org/packages/82/95/9dc227d441ff2670651c27a739acb2535ccaf8b351a88d78c088965e5996/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2", size = 524334, upload-time = "2025-08-27T12:13:37.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/01/a670c232f401d9ad461d9a332aa4080cd3cb1d1df18213dbd0d2a6a7ab51/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0", size = 407691, upload-time = "2025-08-27T12:13:38.94Z" }, + { url = "https://files.pythonhosted.org/packages/03/36/0a14aebbaa26fe7fab4780c76f2239e76cc95a0090bdb25e31d95c492fcd/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418", size = 386868, upload-time = "2025-08-27T12:13:40.192Z" }, + { url = "https://files.pythonhosted.org/packages/3b/03/8c897fb8b5347ff6c1cc31239b9611c5bf79d78c984430887a353e1409a1/rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d", size = 405469, upload-time = "2025-08-27T12:13:41.496Z" }, + { url = "https://files.pythonhosted.org/packages/da/07/88c60edc2df74850d496d78a1fdcdc7b54360a7f610a4d50008309d41b94/rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274", size = 422125, upload-time = "2025-08-27T12:13:42.802Z" }, + { url = "https://files.pythonhosted.org/packages/6b/86/5f4c707603e41b05f191a749984f390dabcbc467cf833769b47bf14ba04f/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd", size = 562341, upload-time = "2025-08-27T12:13:44.472Z" }, + { url = "https://files.pythonhosted.org/packages/b2/92/3c0cb2492094e3cd9baf9e49bbb7befeceb584ea0c1a8b5939dca4da12e5/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2", size = 592511, upload-time = "2025-08-27T12:13:45.898Z" }, + { url = "https://files.pythonhosted.org/packages/10/bb/82e64fbb0047c46a168faa28d0d45a7851cd0582f850b966811d30f67ad8/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002", size = 557736, upload-time = "2025-08-27T12:13:47.408Z" }, + { url = "https://files.pythonhosted.org/packages/00/95/3c863973d409210da7fb41958172c6b7dbe7fc34e04d3cc1f10bb85e979f/rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3", size = 221462, upload-time = "2025-08-27T12:13:48.742Z" }, + { url = "https://files.pythonhosted.org/packages/ce/2c/5867b14a81dc217b56d95a9f2a40fdbc56a1ab0181b80132beeecbd4b2d6/rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83", size = 232034, upload-time = "2025-08-27T12:13:50.11Z" }, + { url = "https://files.pythonhosted.org/packages/c7/78/3958f3f018c01923823f1e47f1cc338e398814b92d83cd278364446fac66/rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d", size = 222392, upload-time = "2025-08-27T12:13:52.587Z" }, + { url = "https://files.pythonhosted.org/packages/01/76/1cdf1f91aed5c3a7bf2eba1f1c4e4d6f57832d73003919a20118870ea659/rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228", size = 358355, upload-time = "2025-08-27T12:13:54.012Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6f/bf142541229374287604caf3bb2a4ae17f0a580798fd72d3b009b532db4e/rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92", size = 342138, upload-time = "2025-08-27T12:13:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/1a/77/355b1c041d6be40886c44ff5e798b4e2769e497b790f0f7fd1e78d17e9a8/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2", size = 380247, upload-time = "2025-08-27T12:13:57.683Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a4/d9cef5c3946ea271ce2243c51481971cd6e34f21925af2783dd17b26e815/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723", size = 390699, upload-time = "2025-08-27T12:13:59.137Z" }, + { url = "https://files.pythonhosted.org/packages/3a/06/005106a7b8c6c1a7e91b73169e49870f4af5256119d34a361ae5240a0c1d/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802", size = 521852, upload-time = "2025-08-27T12:14:00.583Z" }, + { url = "https://files.pythonhosted.org/packages/e5/3e/50fb1dac0948e17a02eb05c24510a8fe12d5ce8561c6b7b7d1339ab7ab9c/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f", size = 402582, upload-time = "2025-08-27T12:14:02.034Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b0/f4e224090dc5b0ec15f31a02d746ab24101dd430847c4d99123798661bfc/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2", size = 384126, upload-time = "2025-08-27T12:14:03.437Z" }, + { url = "https://files.pythonhosted.org/packages/54/77/ac339d5f82b6afff1df8f0fe0d2145cc827992cb5f8eeb90fc9f31ef7a63/rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21", size = 399486, upload-time = "2025-08-27T12:14:05.443Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/3e1c255eee6ac358c056a57d6d6869baa00a62fa32eea5ee0632039c50a3/rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef", size = 414832, upload-time = "2025-08-27T12:14:06.902Z" }, + { url = "https://files.pythonhosted.org/packages/3f/db/6d498b844342deb3fa1d030598db93937a9964fcf5cb4da4feb5f17be34b/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081", size = 557249, upload-time = "2025-08-27T12:14:08.37Z" }, + { url = "https://files.pythonhosted.org/packages/60/f3/690dd38e2310b6f68858a331399b4d6dbb9132c3e8ef8b4333b96caf403d/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd", size = 587356, upload-time = "2025-08-27T12:14:10.034Z" }, + { url = "https://files.pythonhosted.org/packages/86/e3/84507781cccd0145f35b1dc32c72675200c5ce8d5b30f813e49424ef68fc/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7", size = 555300, upload-time = "2025-08-27T12:14:11.783Z" }, + { url = "https://files.pythonhosted.org/packages/e5/ee/375469849e6b429b3516206b4580a79e9ef3eb12920ddbd4492b56eaacbe/rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688", size = 216714, upload-time = "2025-08-27T12:14:13.629Z" }, + { url = "https://files.pythonhosted.org/packages/21/87/3fc94e47c9bd0742660e84706c311a860dcae4374cf4a03c477e23ce605a/rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797", size = 228943, upload-time = "2025-08-27T12:14:14.937Z" }, + { url = "https://files.pythonhosted.org/packages/70/36/b6e6066520a07cf029d385de869729a895917b411e777ab1cde878100a1d/rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334", size = 362472, upload-time = "2025-08-27T12:14:16.333Z" }, + { url = "https://files.pythonhosted.org/packages/af/07/b4646032e0dcec0df9c73a3bd52f63bc6c5f9cda992f06bd0e73fe3fbebd/rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33", size = 345676, upload-time = "2025-08-27T12:14:17.764Z" }, + { url = "https://files.pythonhosted.org/packages/b0/16/2f1003ee5d0af4bcb13c0cf894957984c32a6751ed7206db2aee7379a55e/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a", size = 385313, upload-time = "2025-08-27T12:14:19.829Z" }, + { url = "https://files.pythonhosted.org/packages/05/cd/7eb6dd7b232e7f2654d03fa07f1414d7dfc980e82ba71e40a7c46fd95484/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b", size = 399080, upload-time = "2025-08-27T12:14:21.531Z" }, + { url = "https://files.pythonhosted.org/packages/20/51/5829afd5000ec1cb60f304711f02572d619040aa3ec033d8226817d1e571/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7", size = 523868, upload-time = "2025-08-27T12:14:23.485Z" }, + { url = "https://files.pythonhosted.org/packages/05/2c/30eebca20d5db95720ab4d2faec1b5e4c1025c473f703738c371241476a2/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136", size = 408750, upload-time = "2025-08-27T12:14:24.924Z" }, + { url = "https://files.pythonhosted.org/packages/90/1a/cdb5083f043597c4d4276eae4e4c70c55ab5accec078da8611f24575a367/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff", size = 387688, upload-time = "2025-08-27T12:14:27.537Z" }, + { url = "https://files.pythonhosted.org/packages/7c/92/cf786a15320e173f945d205ab31585cc43969743bb1a48b6888f7a2b0a2d/rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9", size = 407225, upload-time = "2025-08-27T12:14:28.981Z" }, + { url = "https://files.pythonhosted.org/packages/33/5c/85ee16df5b65063ef26017bef33096557a4c83fbe56218ac7cd8c235f16d/rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60", size = 423361, upload-time = "2025-08-27T12:14:30.469Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8e/1c2741307fcabd1a334ecf008e92c4f47bb6f848712cf15c923becfe82bb/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e", size = 562493, upload-time = "2025-08-27T12:14:31.987Z" }, + { url = "https://files.pythonhosted.org/packages/04/03/5159321baae9b2222442a70c1f988cbbd66b9be0675dd3936461269be360/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212", size = 592623, upload-time = "2025-08-27T12:14:33.543Z" }, + { url = "https://files.pythonhosted.org/packages/ff/39/c09fd1ad28b85bc1d4554a8710233c9f4cefd03d7717a1b8fbfd171d1167/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675", size = 558800, upload-time = "2025-08-27T12:14:35.436Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d6/99228e6bbcf4baa764b18258f519a9035131d91b538d4e0e294313462a98/rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3", size = 221943, upload-time = "2025-08-27T12:14:36.898Z" }, + { url = "https://files.pythonhosted.org/packages/be/07/c802bc6b8e95be83b79bdf23d1aa61d68324cb1006e245d6c58e959e314d/rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash = "sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456", size = 233739, upload-time = "2025-08-27T12:14:38.386Z" }, + { url = "https://files.pythonhosted.org/packages/c8/89/3e1b1c16d4c2d547c5717377a8df99aee8099ff050f87c45cb4d5fa70891/rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3", size = 223120, upload-time = "2025-08-27T12:14:39.82Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/dc7931dc2fa4a6e46b2a4fa744a9fe5c548efd70e0ba74f40b39fa4a8c10/rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2", size = 358944, upload-time = "2025-08-27T12:14:41.199Z" }, + { url = "https://files.pythonhosted.org/packages/e6/22/4af76ac4e9f336bfb1a5f240d18a33c6b2fcaadb7472ac7680576512b49a/rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4", size = 342283, upload-time = "2025-08-27T12:14:42.699Z" }, + { url = "https://files.pythonhosted.org/packages/1c/15/2a7c619b3c2272ea9feb9ade67a45c40b3eeb500d503ad4c28c395dc51b4/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e", size = 380320, upload-time = "2025-08-27T12:14:44.157Z" }, + { url = "https://files.pythonhosted.org/packages/a2/7d/4c6d243ba4a3057e994bb5bedd01b5c963c12fe38dde707a52acdb3849e7/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817", size = 391760, upload-time = "2025-08-27T12:14:45.845Z" }, + { url = "https://files.pythonhosted.org/packages/b4/71/b19401a909b83bcd67f90221330bc1ef11bc486fe4e04c24388d28a618ae/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec", size = 522476, upload-time = "2025-08-27T12:14:47.364Z" }, + { url = "https://files.pythonhosted.org/packages/e4/44/1a3b9715c0455d2e2f0f6df5ee6d6f5afdc423d0773a8a682ed2b43c566c/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a", size = 403418, upload-time = "2025-08-27T12:14:49.991Z" }, + { url = "https://files.pythonhosted.org/packages/1c/4b/fb6c4f14984eb56673bc868a66536f53417ddb13ed44b391998100a06a96/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8", size = 384771, upload-time = "2025-08-27T12:14:52.159Z" }, + { url = "https://files.pythonhosted.org/packages/c0/56/d5265d2d28b7420d7b4d4d85cad8ef891760f5135102e60d5c970b976e41/rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48", size = 400022, upload-time = "2025-08-27T12:14:53.859Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e9/9f5fc70164a569bdd6ed9046486c3568d6926e3a49bdefeeccfb18655875/rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb", size = 416787, upload-time = "2025-08-27T12:14:55.673Z" }, + { url = "https://files.pythonhosted.org/packages/d4/64/56dd03430ba491db943a81dcdef115a985aac5f44f565cd39a00c766d45c/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734", size = 557538, upload-time = "2025-08-27T12:14:57.245Z" }, + { url = "https://files.pythonhosted.org/packages/3f/36/92cc885a3129993b1d963a2a42ecf64e6a8e129d2c7cc980dbeba84e55fb/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb", size = 588512, upload-time = "2025-08-27T12:14:58.728Z" }, + { url = "https://files.pythonhosted.org/packages/dd/10/6b283707780a81919f71625351182b4f98932ac89a09023cb61865136244/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0", size = 555813, upload-time = "2025-08-27T12:15:00.334Z" }, + { url = "https://files.pythonhosted.org/packages/04/2e/30b5ea18c01379da6272a92825dd7e53dc9d15c88a19e97932d35d430ef7/rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a", size = 217385, upload-time = "2025-08-27T12:15:01.937Z" }, + { url = "https://files.pythonhosted.org/packages/32/7d/97119da51cb1dd3f2f3c0805f155a3aa4a95fa44fe7d78ae15e69edf4f34/rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772", size = 230097, upload-time = "2025-08-27T12:15:03.961Z" }, + { url = "https://files.pythonhosted.org/packages/d5/63/b7cc415c345625d5e62f694ea356c58fb964861409008118f1245f8c3347/rpds_py-0.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7ba22cb9693df986033b91ae1d7a979bc399237d45fccf875b76f62bb9e52ddf", size = 371360, upload-time = "2025-08-27T12:15:29.218Z" }, + { url = "https://files.pythonhosted.org/packages/e5/8c/12e1b24b560cf378b8ffbdb9dc73abd529e1adcfcf82727dfd29c4a7b88d/rpds_py-0.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b640501be9288c77738b5492b3fd3abc4ba95c50c2e41273c8a1459f08298d3", size = 353933, upload-time = "2025-08-27T12:15:30.837Z" }, + { url = "https://files.pythonhosted.org/packages/9b/85/1bb2210c1f7a1b99e91fea486b9f0f894aa5da3a5ec7097cbad7dec6d40f/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb08b65b93e0c6dd70aac7f7890a9c0938d5ec71d5cb32d45cf844fb8ae47636", size = 382962, upload-time = "2025-08-27T12:15:32.348Z" }, + { url = "https://files.pythonhosted.org/packages/cc/c9/a839b9f219cf80ed65f27a7f5ddbb2809c1b85c966020ae2dff490e0b18e/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7ff07d696a7a38152ebdb8212ca9e5baab56656749f3d6004b34ab726b550b8", size = 394412, upload-time = "2025-08-27T12:15:33.839Z" }, + { url = "https://files.pythonhosted.org/packages/02/2d/b1d7f928b0b1f4fc2e0133e8051d199b01d7384875adc63b6ddadf3de7e5/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb7c72262deae25366e3b6c0c0ba46007967aea15d1eea746e44ddba8ec58dcc", size = 523972, upload-time = "2025-08-27T12:15:35.377Z" }, + { url = "https://files.pythonhosted.org/packages/a9/af/2cbf56edd2d07716df1aec8a726b3159deb47cb5c27e1e42b71d705a7c2f/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b002cab05d6339716b03a4a3a2ce26737f6231d7b523f339fa061d53368c9d8", size = 403273, upload-time = "2025-08-27T12:15:37.051Z" }, + { url = "https://files.pythonhosted.org/packages/c0/93/425e32200158d44ff01da5d9612c3b6711fe69f606f06e3895511f17473b/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23f6b69d1c26c4704fec01311963a41d7de3ee0570a84ebde4d544e5a1859ffc", size = 385278, upload-time = "2025-08-27T12:15:38.571Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1a/1a04a915ecd0551bfa9e77b7672d1937b4b72a0fc204a17deef76001cfb2/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:530064db9146b247351f2a0250b8f00b289accea4596a033e94be2389977de71", size = 402084, upload-time = "2025-08-27T12:15:40.529Z" }, + { url = "https://files.pythonhosted.org/packages/51/f7/66585c0fe5714368b62951d2513b684e5215beaceab2c6629549ddb15036/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b90b0496570bd6b0321724a330d8b545827c4df2034b6ddfc5f5275f55da2ad", size = 419041, upload-time = "2025-08-27T12:15:42.191Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7e/83a508f6b8e219bba2d4af077c35ba0e0cdd35a751a3be6a7cba5a55ad71/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879b0e14a2da6a1102a3fc8af580fc1ead37e6d6692a781bd8c83da37429b5ab", size = 560084, upload-time = "2025-08-27T12:15:43.839Z" }, + { url = "https://files.pythonhosted.org/packages/66/66/bb945683b958a1b19eb0fe715594630d0f36396ebdef4d9b89c2fa09aa56/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:0d807710df3b5faa66c731afa162ea29717ab3be17bdc15f90f2d9f183da4059", size = 590115, upload-time = "2025-08-27T12:15:46.647Z" }, + { url = "https://files.pythonhosted.org/packages/12/00/ccfaafaf7db7e7adace915e5c2f2c2410e16402561801e9c7f96683002d3/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3adc388fc3afb6540aec081fa59e6e0d3908722771aa1e37ffe22b220a436f0b", size = 556561, upload-time = "2025-08-27T12:15:48.219Z" }, + { url = "https://files.pythonhosted.org/packages/e1/b7/92b6ed9aad103bfe1c45df98453dfae40969eef2cb6c6239c58d7e96f1b3/rpds_py-0.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c796c0c1cc68cb08b0284db4229f5af76168172670c74908fdbd4b7d7f515819", size = 229125, upload-time = "2025-08-27T12:15:49.956Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ed/e1fba02de17f4f76318b834425257c8ea297e415e12c68b4361f63e8ae92/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df", size = 371402, upload-time = "2025-08-27T12:15:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/af/7c/e16b959b316048b55585a697e94add55a4ae0d984434d279ea83442e460d/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3", size = 354084, upload-time = "2025-08-27T12:15:53.219Z" }, + { url = "https://files.pythonhosted.org/packages/de/c1/ade645f55de76799fdd08682d51ae6724cb46f318573f18be49b1e040428/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9", size = 383090, upload-time = "2025-08-27T12:15:55.158Z" }, + { url = "https://files.pythonhosted.org/packages/1f/27/89070ca9b856e52960da1472efcb6c20ba27cfe902f4f23ed095b9cfc61d/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc", size = 394519, upload-time = "2025-08-27T12:15:57.238Z" }, + { url = "https://files.pythonhosted.org/packages/b3/28/be120586874ef906aa5aeeae95ae8df4184bc757e5b6bd1c729ccff45ed5/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4", size = 523817, upload-time = "2025-08-27T12:15:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/70cc197bc11cfcde02a86f36ac1eed15c56667c2ebddbdb76a47e90306da/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66", size = 403240, upload-time = "2025-08-27T12:16:00.923Z" }, + { url = "https://files.pythonhosted.org/packages/cf/35/46936cca449f7f518f2f4996e0e8344db4b57e2081e752441154089d2a5f/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e", size = 385194, upload-time = "2025-08-27T12:16:02.802Z" }, + { url = "https://files.pythonhosted.org/packages/e1/62/29c0d3e5125c3270b51415af7cbff1ec587379c84f55a5761cc9efa8cd06/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c", size = 402086, upload-time = "2025-08-27T12:16:04.806Z" }, + { url = "https://files.pythonhosted.org/packages/8f/66/03e1087679227785474466fdd04157fb793b3b76e3fcf01cbf4c693c1949/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf", size = 419272, upload-time = "2025-08-27T12:16:06.471Z" }, + { url = "https://files.pythonhosted.org/packages/6a/24/e3e72d265121e00b063aef3e3501e5b2473cf1b23511d56e529531acf01e/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf", size = 560003, upload-time = "2025-08-27T12:16:08.06Z" }, + { url = "https://files.pythonhosted.org/packages/26/ca/f5a344c534214cc2d41118c0699fffbdc2c1bc7046f2a2b9609765ab9c92/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6", size = 590482, upload-time = "2025-08-27T12:16:10.137Z" }, + { url = "https://files.pythonhosted.org/packages/ce/08/4349bdd5c64d9d193c360aa9db89adeee6f6682ab8825dca0a3f535f434f/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a", size = 556523, upload-time = "2025-08-27T12:16:12.188Z" }, +] + +[[package]] +name = "rsa" +version = "4.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "sse-starlette" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = "2025-07-27T09:07:44.565Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" }, +] + +[[package]] +name = "starlette" +version = "0.47.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/15/b9/cc3017f9a9c9b6e27c5106cc10cc7904653c3eec0729793aec10479dd669/starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9", size = 2584144, upload-time = "2025-08-24T13:36:42.122Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/fd/901cfa59aaa5b30a99e16876f11abe38b59a1a2c51ffb3d7142bb6089069/starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51", size = 72991, upload-time = "2025-08-24T13:36:40.887Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, +] + +[[package]] +name = "uritemplate" +version = "4.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/60/f174043244c5306c9988380d2cb10009f91563fc4b31293d27e17201af56/uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e", size = 33267, upload-time = "2025-06-02T15:12:06.318Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/99/3ae339466c9183ea5b8ae87b34c0b897eda475d2aec2307cae60e5cd4f29/uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686", size = 11488, upload-time = "2025-06-02T15:12:03.405Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.35.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = "uvloop" +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload-time = "2024-10-14T23:38:35.489Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/76/44a55515e8c9505aa1420aebacf4dd82552e5e15691654894e90d0bd051a/uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f", size = 1442019, upload-time = "2024-10-14T23:37:20.068Z" }, + { url = "https://files.pythonhosted.org/packages/35/5a/62d5800358a78cc25c8a6c72ef8b10851bdb8cca22e14d9c74167b7f86da/uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d", size = 801898, upload-time = "2024-10-14T23:37:22.663Z" }, + { url = "https://files.pythonhosted.org/packages/f3/96/63695e0ebd7da6c741ccd4489b5947394435e198a1382349c17b1146bb97/uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26", size = 3827735, upload-time = "2024-10-14T23:37:25.129Z" }, + { url = "https://files.pythonhosted.org/packages/61/e0/f0f8ec84979068ffae132c58c79af1de9cceeb664076beea86d941af1a30/uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb", size = 3825126, upload-time = "2024-10-14T23:37:27.59Z" }, + { url = "https://files.pythonhosted.org/packages/bf/fe/5e94a977d058a54a19df95f12f7161ab6e323ad49f4dabc28822eb2df7ea/uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f", size = 3705789, upload-time = "2024-10-14T23:37:29.385Z" }, + { url = "https://files.pythonhosted.org/packages/26/dd/c7179618e46092a77e036650c1f056041a028a35c4d76945089fcfc38af8/uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c", size = 3800523, upload-time = "2024-10-14T23:37:32.048Z" }, + { url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8", size = 1447410, upload-time = "2024-10-14T23:37:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0", size = 805476, upload-time = "2024-10-14T23:37:36.11Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e", size = 3960855, upload-time = "2024-10-14T23:37:37.683Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb", size = 3973185, upload-time = "2024-10-14T23:37:40.226Z" }, + { url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6", size = 3820256, upload-time = "2024-10-14T23:37:42.839Z" }, + { url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d", size = 3937323, upload-time = "2024-10-14T23:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284, upload-time = "2024-10-14T23:37:47.833Z" }, + { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349, upload-time = "2024-10-14T23:37:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089, upload-time = "2024-10-14T23:37:51.703Z" }, + { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770, upload-time = "2024-10-14T23:37:54.122Z" }, + { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321, upload-time = "2024-10-14T23:37:55.766Z" }, + { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022, upload-time = "2024-10-14T23:37:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123, upload-time = "2024-10-14T23:38:00.688Z" }, + { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325, upload-time = "2024-10-14T23:38:02.309Z" }, + { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806, upload-time = "2024-10-14T23:38:04.711Z" }, + { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068, upload-time = "2024-10-14T23:38:06.385Z" }, + { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428, upload-time = "2024-10-14T23:38:08.416Z" }, + { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018, upload-time = "2024-10-14T23:38:10.888Z" }, +] + +[[package]] +name = "watchfiles" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2a/9a/d451fcc97d029f5812e898fd30a53fd8c15c7bbd058fd75cfc6beb9bd761/watchfiles-1.1.0.tar.gz", hash = "sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575", size = 94406, upload-time = "2025-06-15T19:06:59.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/dd/579d1dc57f0f895426a1211c4ef3b0cb37eb9e642bb04bdcd962b5df206a/watchfiles-1.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:27f30e14aa1c1e91cb653f03a63445739919aef84c8d2517997a83155e7a2fcc", size = 405757, upload-time = "2025-06-15T19:04:51.058Z" }, + { url = "https://files.pythonhosted.org/packages/1c/a0/7a0318cd874393344d48c34d53b3dd419466adf59a29ba5b51c88dd18b86/watchfiles-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3366f56c272232860ab45c77c3ca7b74ee819c8e1f6f35a7125556b198bbc6df", size = 397511, upload-time = "2025-06-15T19:04:52.79Z" }, + { url = "https://files.pythonhosted.org/packages/06/be/503514656d0555ec2195f60d810eca29b938772e9bfb112d5cd5ad6f6a9e/watchfiles-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8412eacef34cae2836d891836a7fff7b754d6bcac61f6c12ba5ca9bc7e427b68", size = 450739, upload-time = "2025-06-15T19:04:54.203Z" }, + { url = "https://files.pythonhosted.org/packages/4e/0d/a05dd9e5f136cdc29751816d0890d084ab99f8c17b86f25697288ca09bc7/watchfiles-1.1.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df670918eb7dd719642e05979fc84704af913d563fd17ed636f7c4783003fdcc", size = 458106, upload-time = "2025-06-15T19:04:55.607Z" }, + { url = "https://files.pythonhosted.org/packages/f1/fa/9cd16e4dfdb831072b7ac39e7bea986e52128526251038eb481effe9f48e/watchfiles-1.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7642b9bc4827b5518ebdb3b82698ada8c14c7661ddec5fe719f3e56ccd13c97", size = 484264, upload-time = "2025-06-15T19:04:57.009Z" }, + { url = "https://files.pythonhosted.org/packages/32/04/1da8a637c7e2b70e750a0308e9c8e662ada0cca46211fa9ef24a23937e0b/watchfiles-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:199207b2d3eeaeb80ef4411875a6243d9ad8bc35b07fc42daa6b801cc39cc41c", size = 597612, upload-time = "2025-06-15T19:04:58.409Z" }, + { url = "https://files.pythonhosted.org/packages/30/01/109f2762e968d3e58c95731a206e5d7d2a7abaed4299dd8a94597250153c/watchfiles-1.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a479466da6db5c1e8754caee6c262cd373e6e6c363172d74394f4bff3d84d7b5", size = 477242, upload-time = "2025-06-15T19:04:59.786Z" }, + { url = "https://files.pythonhosted.org/packages/b5/b8/46f58cf4969d3b7bc3ca35a98e739fa4085b0657a1540ccc29a1a0bc016f/watchfiles-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935f9edd022ec13e447e5723a7d14456c8af254544cefbc533f6dd276c9aa0d9", size = 453148, upload-time = "2025-06-15T19:05:01.103Z" }, + { url = "https://files.pythonhosted.org/packages/a5/cd/8267594263b1770f1eb76914940d7b2d03ee55eca212302329608208e061/watchfiles-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8076a5769d6bdf5f673a19d51da05fc79e2bbf25e9fe755c47595785c06a8c72", size = 626574, upload-time = "2025-06-15T19:05:02.582Z" }, + { url = "https://files.pythonhosted.org/packages/a1/2f/7f2722e85899bed337cba715723e19185e288ef361360718973f891805be/watchfiles-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:86b1e28d4c37e89220e924305cd9f82866bb0ace666943a6e4196c5df4d58dcc", size = 624378, upload-time = "2025-06-15T19:05:03.719Z" }, + { url = "https://files.pythonhosted.org/packages/bf/20/64c88ec43d90a568234d021ab4b2a6f42a5230d772b987c3f9c00cc27b8b/watchfiles-1.1.0-cp310-cp310-win32.whl", hash = "sha256:d1caf40c1c657b27858f9774d5c0e232089bca9cb8ee17ce7478c6e9264d2587", size = 279829, upload-time = "2025-06-15T19:05:04.822Z" }, + { url = "https://files.pythonhosted.org/packages/39/5c/a9c1ed33de7af80935e4eac09570de679c6e21c07070aa99f74b4431f4d6/watchfiles-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a89c75a5b9bc329131115a409d0acc16e8da8dfd5867ba59f1dd66ae7ea8fa82", size = 292192, upload-time = "2025-06-15T19:05:06.348Z" }, + { url = "https://files.pythonhosted.org/packages/8b/78/7401154b78ab484ccaaeef970dc2af0cb88b5ba8a1b415383da444cdd8d3/watchfiles-1.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c9649dfc57cc1f9835551deb17689e8d44666315f2e82d337b9f07bd76ae3aa2", size = 405751, upload-time = "2025-06-15T19:05:07.679Z" }, + { url = "https://files.pythonhosted.org/packages/76/63/e6c3dbc1f78d001589b75e56a288c47723de28c580ad715eb116639152b5/watchfiles-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:406520216186b99374cdb58bc48e34bb74535adec160c8459894884c983a149c", size = 397313, upload-time = "2025-06-15T19:05:08.764Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a2/8afa359ff52e99af1632f90cbf359da46184207e893a5f179301b0c8d6df/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb45350fd1dc75cd68d3d72c47f5b513cb0578da716df5fba02fff31c69d5f2d", size = 450792, upload-time = "2025-06-15T19:05:09.869Z" }, + { url = "https://files.pythonhosted.org/packages/1d/bf/7446b401667f5c64972a57a0233be1104157fc3abf72c4ef2666c1bd09b2/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:11ee4444250fcbeb47459a877e5e80ed994ce8e8d20283857fc128be1715dac7", size = 458196, upload-time = "2025-06-15T19:05:11.91Z" }, + { url = "https://files.pythonhosted.org/packages/58/2f/501ddbdfa3fa874ea5597c77eeea3d413579c29af26c1091b08d0c792280/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bda8136e6a80bdea23e5e74e09df0362744d24ffb8cd59c4a95a6ce3d142f79c", size = 484788, upload-time = "2025-06-15T19:05:13.373Z" }, + { url = "https://files.pythonhosted.org/packages/61/1e/9c18eb2eb5c953c96bc0e5f626f0e53cfef4bd19bd50d71d1a049c63a575/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b915daeb2d8c1f5cee4b970f2e2c988ce6514aace3c9296e58dd64dc9aa5d575", size = 597879, upload-time = "2025-06-15T19:05:14.725Z" }, + { url = "https://files.pythonhosted.org/packages/8b/6c/1467402e5185d89388b4486745af1e0325007af0017c3384cc786fff0542/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed8fc66786de8d0376f9f913c09e963c66e90ced9aa11997f93bdb30f7c872a8", size = 477447, upload-time = "2025-06-15T19:05:15.775Z" }, + { url = "https://files.pythonhosted.org/packages/2b/a1/ec0a606bde4853d6c4a578f9391eeb3684a9aea736a8eb217e3e00aa89a1/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe4371595edf78c41ef8ac8df20df3943e13defd0efcb732b2e393b5a8a7a71f", size = 453145, upload-time = "2025-06-15T19:05:17.17Z" }, + { url = "https://files.pythonhosted.org/packages/90/b9/ef6f0c247a6a35d689fc970dc7f6734f9257451aefb30def5d100d6246a5/watchfiles-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b7c5f6fe273291f4d414d55b2c80d33c457b8a42677ad14b4b47ff025d0893e4", size = 626539, upload-time = "2025-06-15T19:05:18.557Z" }, + { url = "https://files.pythonhosted.org/packages/34/44/6ffda5537085106ff5aaa762b0d130ac6c75a08015dd1621376f708c94de/watchfiles-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7738027989881e70e3723c75921f1efa45225084228788fc59ea8c6d732eb30d", size = 624472, upload-time = "2025-06-15T19:05:19.588Z" }, + { url = "https://files.pythonhosted.org/packages/c3/e3/71170985c48028fa3f0a50946916a14055e741db11c2e7bc2f3b61f4d0e3/watchfiles-1.1.0-cp311-cp311-win32.whl", hash = "sha256:622d6b2c06be19f6e89b1d951485a232e3b59618def88dbeda575ed8f0d8dbf2", size = 279348, upload-time = "2025-06-15T19:05:20.856Z" }, + { url = "https://files.pythonhosted.org/packages/89/1b/3e39c68b68a7a171070f81fc2561d23ce8d6859659406842a0e4bebf3bba/watchfiles-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:48aa25e5992b61debc908a61ab4d3f216b64f44fdaa71eb082d8b2de846b7d12", size = 292607, upload-time = "2025-06-15T19:05:21.937Z" }, + { url = "https://files.pythonhosted.org/packages/61/9f/2973b7539f2bdb6ea86d2c87f70f615a71a1fc2dba2911795cea25968aea/watchfiles-1.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:00645eb79a3faa70d9cb15c8d4187bb72970b2470e938670240c7998dad9f13a", size = 285056, upload-time = "2025-06-15T19:05:23.12Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b8/858957045a38a4079203a33aaa7d23ea9269ca7761c8a074af3524fbb240/watchfiles-1.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9dc001c3e10de4725c749d4c2f2bdc6ae24de5a88a339c4bce32300a31ede179", size = 402339, upload-time = "2025-06-15T19:05:24.516Z" }, + { url = "https://files.pythonhosted.org/packages/80/28/98b222cca751ba68e88521fabd79a4fab64005fc5976ea49b53fa205d1fa/watchfiles-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9ba68ec283153dead62cbe81872d28e053745f12335d037de9cbd14bd1877f5", size = 394409, upload-time = "2025-06-15T19:05:25.469Z" }, + { url = "https://files.pythonhosted.org/packages/86/50/dee79968566c03190677c26f7f47960aff738d32087087bdf63a5473e7df/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130fc497b8ee68dce163e4254d9b0356411d1490e868bd8790028bc46c5cc297", size = 450939, upload-time = "2025-06-15T19:05:26.494Z" }, + { url = "https://files.pythonhosted.org/packages/40/45/a7b56fb129700f3cfe2594a01aa38d033b92a33dddce86c8dfdfc1247b72/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50a51a90610d0845a5931a780d8e51d7bd7f309ebc25132ba975aca016b576a0", size = 457270, upload-time = "2025-06-15T19:05:27.466Z" }, + { url = "https://files.pythonhosted.org/packages/b5/c8/fa5ef9476b1d02dc6b5e258f515fcaaecf559037edf8b6feffcbc097c4b8/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc44678a72ac0910bac46fa6a0de6af9ba1355669b3dfaf1ce5f05ca7a74364e", size = 483370, upload-time = "2025-06-15T19:05:28.548Z" }, + { url = "https://files.pythonhosted.org/packages/98/68/42cfcdd6533ec94f0a7aab83f759ec11280f70b11bfba0b0f885e298f9bd/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a543492513a93b001975ae283a51f4b67973662a375a403ae82f420d2c7205ee", size = 598654, upload-time = "2025-06-15T19:05:29.997Z" }, + { url = "https://files.pythonhosted.org/packages/d3/74/b2a1544224118cc28df7e59008a929e711f9c68ce7d554e171b2dc531352/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ac164e20d17cc285f2b94dc31c384bc3aa3dd5e7490473b3db043dd70fbccfd", size = 478667, upload-time = "2025-06-15T19:05:31.172Z" }, + { url = "https://files.pythonhosted.org/packages/8c/77/e3362fe308358dc9f8588102481e599c83e1b91c2ae843780a7ded939a35/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7590d5a455321e53857892ab8879dce62d1f4b04748769f5adf2e707afb9d4f", size = 452213, upload-time = "2025-06-15T19:05:32.299Z" }, + { url = "https://files.pythonhosted.org/packages/6e/17/c8f1a36540c9a1558d4faf08e909399e8133599fa359bf52ec8fcee5be6f/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:37d3d3f7defb13f62ece99e9be912afe9dd8a0077b7c45ee5a57c74811d581a4", size = 626718, upload-time = "2025-06-15T19:05:33.415Z" }, + { url = "https://files.pythonhosted.org/packages/26/45/fb599be38b4bd38032643783d7496a26a6f9ae05dea1a42e58229a20ac13/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7080c4bb3efd70a07b1cc2df99a7aa51d98685be56be6038c3169199d0a1c69f", size = 623098, upload-time = "2025-06-15T19:05:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/fdf40e038475498e160cd167333c946e45d8563ae4dd65caf757e9ffe6b4/watchfiles-1.1.0-cp312-cp312-win32.whl", hash = "sha256:cbcf8630ef4afb05dc30107bfa17f16c0896bb30ee48fc24bf64c1f970f3b1fd", size = 279209, upload-time = "2025-06-15T19:05:35.577Z" }, + { url = "https://files.pythonhosted.org/packages/3f/d3/3ae9d5124ec75143bdf088d436cba39812122edc47709cd2caafeac3266f/watchfiles-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:cbd949bdd87567b0ad183d7676feb98136cde5bb9025403794a4c0db28ed3a47", size = 292786, upload-time = "2025-06-15T19:05:36.559Z" }, + { url = "https://files.pythonhosted.org/packages/26/2f/7dd4fc8b5f2b34b545e19629b4a018bfb1de23b3a496766a2c1165ca890d/watchfiles-1.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:0a7d40b77f07be87c6faa93d0951a0fcd8cbca1ddff60a1b65d741bac6f3a9f6", size = 284343, upload-time = "2025-06-15T19:05:37.5Z" }, + { url = "https://files.pythonhosted.org/packages/d3/42/fae874df96595556a9089ade83be34a2e04f0f11eb53a8dbf8a8a5e562b4/watchfiles-1.1.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5007f860c7f1f8df471e4e04aaa8c43673429047d63205d1630880f7637bca30", size = 402004, upload-time = "2025-06-15T19:05:38.499Z" }, + { url = "https://files.pythonhosted.org/packages/fa/55/a77e533e59c3003d9803c09c44c3651224067cbe7fb5d574ddbaa31e11ca/watchfiles-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:20ecc8abbd957046f1fe9562757903f5eaf57c3bce70929fda6c7711bb58074a", size = 393671, upload-time = "2025-06-15T19:05:39.52Z" }, + { url = "https://files.pythonhosted.org/packages/05/68/b0afb3f79c8e832e6571022611adbdc36e35a44e14f129ba09709aa4bb7a/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2f0498b7d2a3c072766dba3274fe22a183dbea1f99d188f1c6c72209a1063dc", size = 449772, upload-time = "2025-06-15T19:05:40.897Z" }, + { url = "https://files.pythonhosted.org/packages/ff/05/46dd1f6879bc40e1e74c6c39a1b9ab9e790bf1f5a2fe6c08b463d9a807f4/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:239736577e848678e13b201bba14e89718f5c2133dfd6b1f7846fa1b58a8532b", size = 456789, upload-time = "2025-06-15T19:05:42.045Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ca/0eeb2c06227ca7f12e50a47a3679df0cd1ba487ea19cf844a905920f8e95/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eff4b8d89f444f7e49136dc695599a591ff769300734446c0a86cba2eb2f9895", size = 482551, upload-time = "2025-06-15T19:05:43.781Z" }, + { url = "https://files.pythonhosted.org/packages/31/47/2cecbd8694095647406645f822781008cc524320466ea393f55fe70eed3b/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12b0a02a91762c08f7264e2e79542f76870c3040bbc847fb67410ab81474932a", size = 597420, upload-time = "2025-06-15T19:05:45.244Z" }, + { url = "https://files.pythonhosted.org/packages/d9/7e/82abc4240e0806846548559d70f0b1a6dfdca75c1b4f9fa62b504ae9b083/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29e7bc2eee15cbb339c68445959108803dc14ee0c7b4eea556400131a8de462b", size = 477950, upload-time = "2025-06-15T19:05:46.332Z" }, + { url = "https://files.pythonhosted.org/packages/25/0d/4d564798a49bf5482a4fa9416dea6b6c0733a3b5700cb8a5a503c4b15853/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9481174d3ed982e269c090f780122fb59cee6c3796f74efe74e70f7780ed94c", size = 451706, upload-time = "2025-06-15T19:05:47.459Z" }, + { url = "https://files.pythonhosted.org/packages/81/b5/5516cf46b033192d544102ea07c65b6f770f10ed1d0a6d388f5d3874f6e4/watchfiles-1.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:80f811146831c8c86ab17b640801c25dc0a88c630e855e2bef3568f30434d52b", size = 625814, upload-time = "2025-06-15T19:05:48.654Z" }, + { url = "https://files.pythonhosted.org/packages/0c/dd/7c1331f902f30669ac3e754680b6edb9a0dd06dea5438e61128111fadd2c/watchfiles-1.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:60022527e71d1d1fda67a33150ee42869042bce3d0fcc9cc49be009a9cded3fb", size = 622820, upload-time = "2025-06-15T19:05:50.088Z" }, + { url = "https://files.pythonhosted.org/packages/1b/14/36d7a8e27cd128d7b1009e7715a7c02f6c131be9d4ce1e5c3b73d0e342d8/watchfiles-1.1.0-cp313-cp313-win32.whl", hash = "sha256:32d6d4e583593cb8576e129879ea0991660b935177c0f93c6681359b3654bfa9", size = 279194, upload-time = "2025-06-15T19:05:51.186Z" }, + { url = "https://files.pythonhosted.org/packages/25/41/2dd88054b849aa546dbeef5696019c58f8e0774f4d1c42123273304cdb2e/watchfiles-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:f21af781a4a6fbad54f03c598ab620e3a77032c5878f3d780448421a6e1818c7", size = 292349, upload-time = "2025-06-15T19:05:52.201Z" }, + { url = "https://files.pythonhosted.org/packages/c8/cf/421d659de88285eb13941cf11a81f875c176f76a6d99342599be88e08d03/watchfiles-1.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:5366164391873ed76bfdf618818c82084c9db7fac82b64a20c44d335eec9ced5", size = 283836, upload-time = "2025-06-15T19:05:53.265Z" }, + { url = "https://files.pythonhosted.org/packages/45/10/6faf6858d527e3599cc50ec9fcae73590fbddc1420bd4fdccfebffeedbc6/watchfiles-1.1.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:17ab167cca6339c2b830b744eaf10803d2a5b6683be4d79d8475d88b4a8a4be1", size = 400343, upload-time = "2025-06-15T19:05:54.252Z" }, + { url = "https://files.pythonhosted.org/packages/03/20/5cb7d3966f5e8c718006d0e97dfe379a82f16fecd3caa7810f634412047a/watchfiles-1.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:328dbc9bff7205c215a7807da7c18dce37da7da718e798356212d22696404339", size = 392916, upload-time = "2025-06-15T19:05:55.264Z" }, + { url = "https://files.pythonhosted.org/packages/8c/07/d8f1176328fa9e9581b6f120b017e286d2a2d22ae3f554efd9515c8e1b49/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7208ab6e009c627b7557ce55c465c98967e8caa8b11833531fdf95799372633", size = 449582, upload-time = "2025-06-15T19:05:56.317Z" }, + { url = "https://files.pythonhosted.org/packages/66/e8/80a14a453cf6038e81d072a86c05276692a1826471fef91df7537dba8b46/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a8f6f72974a19efead54195bc9bed4d850fc047bb7aa971268fd9a8387c89011", size = 456752, upload-time = "2025-06-15T19:05:57.359Z" }, + { url = "https://files.pythonhosted.org/packages/5a/25/0853b3fe0e3c2f5af9ea60eb2e781eade939760239a72c2d38fc4cc335f6/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d181ef50923c29cf0450c3cd47e2f0557b62218c50b2ab8ce2ecaa02bd97e670", size = 481436, upload-time = "2025-06-15T19:05:58.447Z" }, + { url = "https://files.pythonhosted.org/packages/fe/9e/4af0056c258b861fbb29dcb36258de1e2b857be4a9509e6298abcf31e5c9/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adb4167043d3a78280d5d05ce0ba22055c266cf8655ce942f2fb881262ff3cdf", size = 596016, upload-time = "2025-06-15T19:05:59.59Z" }, + { url = "https://files.pythonhosted.org/packages/c5/fa/95d604b58aa375e781daf350897aaaa089cff59d84147e9ccff2447c8294/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c5701dc474b041e2934a26d31d39f90fac8a3dee2322b39f7729867f932b1d4", size = 476727, upload-time = "2025-06-15T19:06:01.086Z" }, + { url = "https://files.pythonhosted.org/packages/65/95/fe479b2664f19be4cf5ceeb21be05afd491d95f142e72d26a42f41b7c4f8/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b067915e3c3936966a8607f6fe5487df0c9c4afb85226613b520890049deea20", size = 451864, upload-time = "2025-06-15T19:06:02.144Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8a/3c4af14b93a15ce55901cd7a92e1a4701910f1768c78fb30f61d2b79785b/watchfiles-1.1.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:9c733cda03b6d636b4219625a4acb5c6ffb10803338e437fb614fef9516825ef", size = 625626, upload-time = "2025-06-15T19:06:03.578Z" }, + { url = "https://files.pythonhosted.org/packages/da/f5/cf6aa047d4d9e128f4b7cde615236a915673775ef171ff85971d698f3c2c/watchfiles-1.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:cc08ef8b90d78bfac66f0def80240b0197008e4852c9f285907377b2947ffdcb", size = 622744, upload-time = "2025-06-15T19:06:05.066Z" }, + { url = "https://files.pythonhosted.org/packages/2c/00/70f75c47f05dea6fd30df90f047765f6fc2d6eb8b5a3921379b0b04defa2/watchfiles-1.1.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:9974d2f7dc561cce3bb88dfa8eb309dab64c729de85fba32e98d75cf24b66297", size = 402114, upload-time = "2025-06-15T19:06:06.186Z" }, + { url = "https://files.pythonhosted.org/packages/53/03/acd69c48db4a1ed1de26b349d94077cca2238ff98fd64393f3e97484cae6/watchfiles-1.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c68e9f1fcb4d43798ad8814c4c1b61547b014b667216cb754e606bfade587018", size = 393879, upload-time = "2025-06-15T19:06:07.369Z" }, + { url = "https://files.pythonhosted.org/packages/2f/c8/a9a2a6f9c8baa4eceae5887fecd421e1b7ce86802bcfc8b6a942e2add834/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95ab1594377effac17110e1352989bdd7bdfca9ff0e5eeccd8c69c5389b826d0", size = 450026, upload-time = "2025-06-15T19:06:08.476Z" }, + { url = "https://files.pythonhosted.org/packages/fe/51/d572260d98388e6e2b967425c985e07d47ee6f62e6455cefb46a6e06eda5/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fba9b62da882c1be1280a7584ec4515d0a6006a94d6e5819730ec2eab60ffe12", size = 457917, upload-time = "2025-06-15T19:06:09.988Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2d/4258e52917bf9f12909b6ec314ff9636276f3542f9d3807d143f27309104/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3434e401f3ce0ed6b42569128b3d1e3af773d7ec18751b918b89cd49c14eaafb", size = 483602, upload-time = "2025-06-15T19:06:11.088Z" }, + { url = "https://files.pythonhosted.org/packages/84/99/bee17a5f341a4345fe7b7972a475809af9e528deba056f8963d61ea49f75/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa257a4d0d21fcbca5b5fcba9dca5a78011cb93c0323fb8855c6d2dfbc76eb77", size = 596758, upload-time = "2025-06-15T19:06:12.197Z" }, + { url = "https://files.pythonhosted.org/packages/40/76/e4bec1d59b25b89d2b0716b41b461ed655a9a53c60dc78ad5771fda5b3e6/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fd1b3879a578a8ec2076c7961076df540b9af317123f84569f5a9ddee64ce92", size = 477601, upload-time = "2025-06-15T19:06:13.391Z" }, + { url = "https://files.pythonhosted.org/packages/1f/fa/a514292956f4a9ce3c567ec0c13cce427c158e9f272062685a8a727d08fc/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62cc7a30eeb0e20ecc5f4bd113cd69dcdb745a07c68c0370cea919f373f65d9e", size = 451936, upload-time = "2025-06-15T19:06:14.656Z" }, + { url = "https://files.pythonhosted.org/packages/32/5d/c3bf927ec3bbeb4566984eba8dd7a8eb69569400f5509904545576741f88/watchfiles-1.1.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:891c69e027748b4a73847335d208e374ce54ca3c335907d381fde4e41661b13b", size = 626243, upload-time = "2025-06-15T19:06:16.232Z" }, + { url = "https://files.pythonhosted.org/packages/e6/65/6e12c042f1a68c556802a84d54bb06d35577c81e29fba14019562479159c/watchfiles-1.1.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:12fe8eaffaf0faa7906895b4f8bb88264035b3f0243275e0bf24af0436b27259", size = 623073, upload-time = "2025-06-15T19:06:17.457Z" }, + { url = "https://files.pythonhosted.org/packages/89/ab/7f79d9bf57329e7cbb0a6fd4c7bd7d0cee1e4a8ef0041459f5409da3506c/watchfiles-1.1.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:bfe3c517c283e484843cb2e357dd57ba009cff351edf45fb455b5fbd1f45b15f", size = 400872, upload-time = "2025-06-15T19:06:18.57Z" }, + { url = "https://files.pythonhosted.org/packages/df/d5/3f7bf9912798e9e6c516094db6b8932df53b223660c781ee37607030b6d3/watchfiles-1.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a9ccbf1f129480ed3044f540c0fdbc4ee556f7175e5ab40fe077ff6baf286d4e", size = 392877, upload-time = "2025-06-15T19:06:19.55Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c5/54ec7601a2798604e01c75294770dbee8150e81c6e471445d7601610b495/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba0e3255b0396cac3cc7bbace76404dd72b5438bf0d8e7cefa2f79a7f3649caa", size = 449645, upload-time = "2025-06-15T19:06:20.66Z" }, + { url = "https://files.pythonhosted.org/packages/0a/04/c2f44afc3b2fce21ca0b7802cbd37ed90a29874f96069ed30a36dfe57c2b/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4281cd9fce9fc0a9dbf0fc1217f39bf9cf2b4d315d9626ef1d4e87b84699e7e8", size = 457424, upload-time = "2025-06-15T19:06:21.712Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b0/eec32cb6c14d248095261a04f290636da3df3119d4040ef91a4a50b29fa5/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d2404af8db1329f9a3c9b79ff63e0ae7131986446901582067d9304ae8aaf7f", size = 481584, upload-time = "2025-06-15T19:06:22.777Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e2/ca4bb71c68a937d7145aa25709e4f5d68eb7698a25ce266e84b55d591bbd/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e78b6ed8165996013165eeabd875c5dfc19d41b54f94b40e9fff0eb3193e5e8e", size = 596675, upload-time = "2025-06-15T19:06:24.226Z" }, + { url = "https://files.pythonhosted.org/packages/a1/dd/b0e4b7fb5acf783816bc950180a6cd7c6c1d2cf7e9372c0ea634e722712b/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:249590eb75ccc117f488e2fabd1bfa33c580e24b96f00658ad88e38844a040bb", size = 477363, upload-time = "2025-06-15T19:06:25.42Z" }, + { url = "https://files.pythonhosted.org/packages/69/c4/088825b75489cb5b6a761a4542645718893d395d8c530b38734f19da44d2/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05686b5487cfa2e2c28ff1aa370ea3e6c5accfe6435944ddea1e10d93872147", size = 452240, upload-time = "2025-06-15T19:06:26.552Z" }, + { url = "https://files.pythonhosted.org/packages/10/8c/22b074814970eeef43b7c44df98c3e9667c1f7bf5b83e0ff0201b0bd43f9/watchfiles-1.1.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:d0e10e6f8f6dc5762adee7dece33b722282e1f59aa6a55da5d493a97282fedd8", size = 625607, upload-time = "2025-06-15T19:06:27.606Z" }, + { url = "https://files.pythonhosted.org/packages/32/fa/a4f5c2046385492b2273213ef815bf71a0d4c1943b784fb904e184e30201/watchfiles-1.1.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:af06c863f152005c7592df1d6a7009c836a247c9d8adb78fef8575a5a98699db", size = 623315, upload-time = "2025-06-15T19:06:29.076Z" }, + { url = "https://files.pythonhosted.org/packages/be/7c/a3d7c55cfa377c2f62c4ae3c6502b997186bc5e38156bafcb9b653de9a6d/watchfiles-1.1.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3a6fd40bbb50d24976eb275ccb55cd1951dfb63dbc27cae3066a6ca5f4beabd5", size = 406748, upload-time = "2025-06-15T19:06:44.2Z" }, + { url = "https://files.pythonhosted.org/packages/38/d0/c46f1b2c0ca47f3667b144de6f0515f6d1c670d72f2ca29861cac78abaa1/watchfiles-1.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9f811079d2f9795b5d48b55a37aa7773680a5659afe34b54cc1d86590a51507d", size = 398801, upload-time = "2025-06-15T19:06:45.774Z" }, + { url = "https://files.pythonhosted.org/packages/70/9c/9a6a42e97f92eeed77c3485a43ea96723900aefa3ac739a8c73f4bff2cd7/watchfiles-1.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2726d7bfd9f76158c84c10a409b77a320426540df8c35be172444394b17f7ea", size = 451528, upload-time = "2025-06-15T19:06:46.791Z" }, + { url = "https://files.pythonhosted.org/packages/51/7b/98c7f4f7ce7ff03023cf971cd84a3ee3b790021ae7584ffffa0eb2554b96/watchfiles-1.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df32d59cb9780f66d165a9a7a26f19df2c7d24e3bd58713108b41d0ff4f929c6", size = 454095, upload-time = "2025-06-15T19:06:48.211Z" }, + { url = "https://files.pythonhosted.org/packages/8c/6b/686dcf5d3525ad17b384fd94708e95193529b460a1b7bf40851f1328ec6e/watchfiles-1.1.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0ece16b563b17ab26eaa2d52230c9a7ae46cf01759621f4fbbca280e438267b3", size = 406910, upload-time = "2025-06-15T19:06:49.335Z" }, + { url = "https://files.pythonhosted.org/packages/f3/d3/71c2dcf81dc1edcf8af9f4d8d63b1316fb0a2dd90cbfd427e8d9dd584a90/watchfiles-1.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:51b81e55d40c4b4aa8658427a3ee7ea847c591ae9e8b81ef94a90b668999353c", size = 398816, upload-time = "2025-06-15T19:06:50.433Z" }, + { url = "https://files.pythonhosted.org/packages/b8/fa/12269467b2fc006f8fce4cd6c3acfa77491dd0777d2a747415f28ccc8c60/watchfiles-1.1.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2bcdc54ea267fe72bfc7d83c041e4eb58d7d8dc6f578dfddb52f037ce62f432", size = 451584, upload-time = "2025-06-15T19:06:51.834Z" }, + { url = "https://files.pythonhosted.org/packages/bd/d3/254cea30f918f489db09d6a8435a7de7047f8cb68584477a515f160541d6/watchfiles-1.1.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:923fec6e5461c42bd7e3fd5ec37492c6f3468be0499bc0707b4bbbc16ac21792", size = 454009, upload-time = "2025-06-15T19:06:52.896Z" }, +] + +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/da/6462a9f510c0c49837bbc9345aca92d767a56c1fb2939e1579df1e1cdcf7/websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b", size = 175423, upload-time = "2025-03-05T20:01:35.363Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9f/9d11c1a4eb046a9e106483b9ff69bce7ac880443f00e5ce64261b47b07e7/websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205", size = 173080, upload-time = "2025-03-05T20:01:37.304Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4f/b462242432d93ea45f297b6179c7333dd0402b855a912a04e7fc61c0d71f/websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a", size = 173329, upload-time = "2025-03-05T20:01:39.668Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0c/6afa1f4644d7ed50284ac59cc70ef8abd44ccf7d45850d989ea7310538d0/websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e", size = 182312, upload-time = "2025-03-05T20:01:41.815Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d4/ffc8bd1350b229ca7a4db2a3e1c482cf87cea1baccd0ef3e72bc720caeec/websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf", size = 181319, upload-time = "2025-03-05T20:01:43.967Z" }, + { url = "https://files.pythonhosted.org/packages/97/3a/5323a6bb94917af13bbb34009fac01e55c51dfde354f63692bf2533ffbc2/websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb", size = 181631, upload-time = "2025-03-05T20:01:46.104Z" }, + { url = "https://files.pythonhosted.org/packages/a6/cc/1aeb0f7cee59ef065724041bb7ed667b6ab1eeffe5141696cccec2687b66/websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d", size = 182016, upload-time = "2025-03-05T20:01:47.603Z" }, + { url = "https://files.pythonhosted.org/packages/79/f9/c86f8f7af208e4161a7f7e02774e9d0a81c632ae76db2ff22549e1718a51/websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9", size = 181426, upload-time = "2025-03-05T20:01:48.949Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b9/828b0bc6753db905b91df6ae477c0b14a141090df64fb17f8a9d7e3516cf/websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c", size = 181360, upload-time = "2025-03-05T20:01:50.938Z" }, + { url = "https://files.pythonhosted.org/packages/89/fb/250f5533ec468ba6327055b7d98b9df056fb1ce623b8b6aaafb30b55d02e/websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256", size = 176388, upload-time = "2025-03-05T20:01:52.213Z" }, + { url = "https://files.pythonhosted.org/packages/1c/46/aca7082012768bb98e5608f01658ff3ac8437e563eca41cf068bd5849a5e/websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41", size = 176830, upload-time = "2025-03-05T20:01:53.922Z" }, + { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" }, + { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" }, + { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" }, + { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" }, + { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = "2025-03-05T20:02:05.29Z" }, + { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = "2025-03-05T20:02:07.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = "2025-03-05T20:02:09.842Z" }, + { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" }, + { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" }, + { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = "2025-03-05T20:02:14.585Z" }, + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, + { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, + { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, + { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/d40f779fa16f74d3468357197af8d6ad07e7c5a27ea1ca74ceb38986f77a/websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3", size = 173109, upload-time = "2025-03-05T20:03:17.769Z" }, + { url = "https://files.pythonhosted.org/packages/bc/cd/5b887b8585a593073fd92f7c23ecd3985cd2c3175025a91b0d69b0551372/websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1", size = 173343, upload-time = "2025-03-05T20:03:19.094Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ae/d34f7556890341e900a95acf4886833646306269f899d58ad62f588bf410/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475", size = 174599, upload-time = "2025-03-05T20:03:21.1Z" }, + { url = "https://files.pythonhosted.org/packages/71/e6/5fd43993a87db364ec60fc1d608273a1a465c0caba69176dd160e197ce42/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9", size = 174207, upload-time = "2025-03-05T20:03:23.221Z" }, + { url = "https://files.pythonhosted.org/packages/2b/fb/c492d6daa5ec067c2988ac80c61359ace5c4c674c532985ac5a123436cec/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04", size = 174155, upload-time = "2025-03-05T20:03:25.321Z" }, + { url = "https://files.pythonhosted.org/packages/68/a1/dcb68430b1d00b698ae7a7e0194433bce4f07ded185f0ee5fb21e2a2e91e/websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122", size = 176884, upload-time = "2025-03-05T20:03:27.934Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +] diff --git a/mcp_servers/google_sheets/.env.example b/mcp_servers/google_sheets/.env.example new file mode 100644 index 00000000..57d82db5 --- /dev/null +++ b/mcp_servers/google_sheets/.env.example @@ -0,0 +1,2 @@ +# Port for the MCP server to listen on +GOOGLE_SHEETS_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/google_sheets/Dockerfile b/mcp_servers/google_sheets/Dockerfile new file mode 100644 index 00000000..c9df371f --- /dev/null +++ b/mcp_servers/google_sheets/Dockerfile @@ -0,0 +1,24 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/google_sheets/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the server code +COPY mcp_servers/google_sheets/server.py . +COPY mcp_servers/google_sheets/models.py . +COPY mcp_servers/google_sheets/utils.py . +COPY mcp_servers/google_sheets/exceptions.py . + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] diff --git a/mcp_servers/google_sheets/README.md b/mcp_servers/google_sheets/README.md new file mode 100644 index 00000000..a9842701 --- /dev/null +++ b/mcp_servers/google_sheets/README.md @@ -0,0 +1,78 @@ +# Google Sheets MCP Server + +A Model Context Protocol (MCP) server for Google Sheets integration. Read, write, and manage spreadsheet data using Google Sheets API with full OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Google Sheets with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("GOOGLE_SHEETS", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/google-sheets-mcp-server:latest + + +# Run Google Sheets MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/google-sheets-mcp-server:latest + + +# Run Google Sheets MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_google_access_token_here"}' \ + ghcr.io/klavis-ai/google-sheets-mcp-server:latest +``` + +**OAuth Setup:** Google Sheets requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Read Data**: Get spreadsheet values, ranges, and cell data +- **Write Data**: Update cells, rows, and ranges with new values +- **Sheet Management**: Create, delete, and manage worksheet tabs +- **Spreadsheet Info**: Get metadata, properties, and formatting details +- **Batch Operations**: Perform multiple read/write operations efficiently + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/google_sheets/exceptions.py b/mcp_servers/google_sheets/exceptions.py new file mode 100644 index 00000000..1b21229a --- /dev/null +++ b/mcp_servers/google_sheets/exceptions.py @@ -0,0 +1,13 @@ +# Error class for retryable errors +class RetryableToolError(Exception): + def __init__(self, message: str, additional_prompt_content: str = "", retry_after_ms: int = 1000, developer_message: str = ""): + super().__init__(message) + self.additional_prompt_content = additional_prompt_content + self.retry_after_ms = retry_after_ms + self.developer_message = developer_message + +# Error class for tool execution errors +class ToolExecutionError(Exception): + def __init__(self, message: str): + super().__init__(message) + self.message = message diff --git a/mcp_servers/google_sheets/models.py b/mcp_servers/google_sheets/models.py new file mode 100644 index 00000000..363d6af4 --- /dev/null +++ b/mcp_servers/google_sheets/models.py @@ -0,0 +1,267 @@ +import json +from datetime import date, datetime, time, timedelta +from enum import Enum +from typing import Optional +from zoneinfo import ZoneInfo + +from pydantic import BaseModel, field_validator, model_validator + +class CellErrorType(str, Enum): + """The type of error in a cell + + Implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/other#ErrorType + """ + + ERROR_TYPE_UNSPECIFIED = "ERROR_TYPE_UNSPECIFIED" # The default error type, do not use this. + ERROR = "ERROR" # Corresponds to the #ERROR! error. + NULL_VALUE = "NULL_VALUE" # Corresponds to the #NULL! error. + DIVIDE_BY_ZERO = "DIVIDE_BY_ZERO" # Corresponds to the #DIV/0 error. + VALUE = "VALUE" # Corresponds to the #VALUE! error. + REF = "REF" # Corresponds to the #REF! error. + NAME = "NAME" # Corresponds to the #NAME? error. + NUM = "NUM" # Corresponds to the #NUM! error. + N_A = "N_A" # Corresponds to the #N/A error. + LOADING = "LOADING" # Corresponds to the Loading... state. + + +class CellErrorValue(BaseModel): + """An error in a cell + + Implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/other#ErrorValue + """ + + type: CellErrorType + message: str + + +class CellExtendedValue(BaseModel): + """The kinds of value that a cell in a spreadsheet can have + + Implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/other#ExtendedValue + """ + + numberValue: float | None = None + stringValue: str | None = None + boolValue: bool | None = None + formulaValue: str | None = None + errorValue: Optional["CellErrorValue"] = None + + @model_validator(mode="after") + def check_exactly_one_value(cls, instance): # type: ignore[no-untyped-def] + provided = [v for v in instance.__dict__.values() if v is not None] + if len(provided) != 1: + raise ValueError( + "Exactly one of numberValue, stringValue, boolValue, " + "formulaValue, or errorValue must be set." + ) + return instance + + +class NumberFormatType(str, Enum): + NUMBER = "NUMBER" + PERCENT = "PERCENT" + CURRENCY = "CURRENCY" + + +class NumberFormat(BaseModel): + """The format of a number + + Implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/cells#NumberFormat + """ + + pattern: str + type: NumberFormatType + + +class CellFormat(BaseModel): + """The format of a cell + + Partial implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/cells#CellFormat + """ + + numberFormat: NumberFormat + + +class CellData(BaseModel): + """Data about a specific cell + + A partial implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/cells#CellData + """ + + userEnteredValue: CellExtendedValue + userEnteredFormat: CellFormat | None = None + + +class RowData(BaseModel): + """Data about each cellin a row + + A partial implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/sheets#RowData + """ + + values: list[CellData] + + +class GridData(BaseModel): + """Data in the grid + + A partial implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/sheets#GridData + """ + + startRow: int + startColumn: int + rowData: list[RowData] + + +class GridProperties(BaseModel): + """Properties of a grid + + A partial implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/sheets#GridProperties + """ + + rowCount: int + columnCount: int + + +class SheetProperties(BaseModel): + """Properties of a Sheet + + A partial implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/sheets#SheetProperties + """ + + sheetId: int + title: str + gridProperties: GridProperties | None = None + + +class Sheet(BaseModel): + """A Sheet in a spreadsheet + + A partial implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/sheets#Sheet + """ + + properties: SheetProperties + data: list[GridData] | None = None + + +class SpreadsheetProperties(BaseModel): + """Properties of a spreadsheet + + A partial implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets#SpreadsheetProperties + """ + + title: str + + +class Spreadsheet(BaseModel): + """A spreadsheet + + A partial implementation of https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets + """ + + properties: SpreadsheetProperties + sheets: list[Sheet] + + +CellValue = int | float | str | bool + + +class SheetDataInput(BaseModel): + """ + SheetDataInput models the cell data of a spreadsheet in a custom format. + + It is a dictionary mapping row numbers (as ints) to dictionaries that map + column letters (as uppercase strings) to cell values (int, float, str, or bool). + + This model enforces that: + - The outer keys are convertible to int. + - The inner keys are alphabetic strings (normalized to uppercase). + - All cell values are only of type int, float, str, or bool. + + The model automatically serializes (via `json_data()`) + and validates the inner types. + """ + + data: dict[int, dict[str, CellValue]] + + @classmethod + def _parse_json_if_string(cls, value): # type: ignore[no-untyped-def] + """Parses the value if it is a JSON string, otherwise returns it. + + Helper method for when validating the `data` field. + """ + if isinstance(value, str): + try: + return json.loads(value) + except json.JSONDecodeError as e: + raise TypeError(f"Invalid JSON: {e}") + return value + + @classmethod + def _validate_row_key(cls, row_key) -> int: # type: ignore[no-untyped-def] + """Converts the row key to an integer, raising an error if conversion fails. + + Helper method for when validating the `data` field. + """ + try: + return int(row_key) + except (ValueError, TypeError): + raise TypeError(f"Row key '{row_key}' is not convertible to int.") + + @classmethod + def _validate_inner_cells(cls, cells, row_int: int) -> dict: # type: ignore[no-untyped-def] + """Validates that 'cells' is a dict mapping column letters to valid cell values + and normalizes the keys. + + Helper method for when validating the `data` field. + """ + if not isinstance(cells, dict): + raise TypeError( + f"Value for row '{row_int}' must be a dict mapping column letters to cell values." + ) + new_inner = {} + for col_key, cell_value in cells.items(): + if not isinstance(col_key, str): + raise TypeError(f"Column key '{col_key}' must be a string.") + col_string = col_key.upper() + if not col_string.isalpha(): + raise TypeError(f"Column key '{col_key}' is invalid. Must be alphabetic.") + if not isinstance(cell_value, int | float | str | bool): + raise TypeError( + f"Cell value for {col_string}{row_int} must be an int, float, str, or bool." + ) + new_inner[col_string] = cell_value + return new_inner + + @field_validator("data", mode="before") + @classmethod + def validate_and_convert_keys(cls, value): # type: ignore[no-untyped-def] + """ + Validates data when SheetDataInput is instantiated and converts it to the correct format. + Uses private helper methods to parse JSON, validate row keys, and validate inner cell data. + """ + if value is None: + return {} + + value = cls._parse_json_if_string(value) + if isinstance(value, dict): + new_value = {} + for row_key, cells in value.items(): + row_int = cls._validate_row_key(row_key) + inner_cells = cls._validate_inner_cells(cells, row_int) + new_value[row_int] = inner_cells + return new_value + + raise TypeError("data must be a dict or a valid JSON string representing a dict") + + def json_data(self) -> str: + """ + Serialize the sheet data to a JSON string. + """ + return json.dumps(self.data) + + @classmethod + def from_json(cls, json_str: str) -> "SheetDataInput": + """ + Create a SheetData instance from a JSON string. + """ + return cls.model_validate_json(json_str) diff --git a/mcp_servers/google_sheets/requirements.txt b/mcp_servers/google_sheets/requirements.txt new file mode 100644 index 00000000..ffdfbc1a --- /dev/null +++ b/mcp_servers/google_sheets/requirements.txt @@ -0,0 +1,13 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +google-auth +google-auth-oauthlib +google-auth-httplib2 +google-api-python-client +click +starlette +packaging diff --git a/mcp_servers/google_sheets/server.py b/mcp_servers/google_sheets/server.py new file mode 100644 index 00000000..40dc02eb --- /dev/null +++ b/mcp_servers/google_sheets/server.py @@ -0,0 +1,578 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv +from google.oauth2.credentials import Credentials +from googleapiclient.discovery import build +from googleapiclient.errors import HttpError + +from exceptions import RetryableToolError +from models import ( + SheetDataInput, + Spreadsheet, + SpreadsheetProperties, +) +from utils import ( + create_sheet, + parse_get_spreadsheet_response, + parse_write_to_cell_response, + validate_write_to_cell_params, +) + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +GOOGLE_SHEETS_MCP_SERVER_PORT = int(os.getenv("GOOGLE_SHEETS_MCP_SERVER_PORT", "5000")) + +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +def get_sheets_service(access_token: str): + """Create Google Sheets service with access token.""" + credentials = Credentials(token=access_token) + return build('sheets', 'v4', credentials=credentials) + +# This is used for the list_all_sheets tool +def get_drive_service(access_token: str): + """Create Google Drive service with access token.""" + credentials = Credentials(token=access_token) + return build('drive', 'v3', credentials=credentials) + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("Authentication token not found in request context") + +def get_auth_token_or_empty() -> str: + """Get the authentication token from context or return empty string.""" + try: + return auth_token_context.get() + except LookupError: + return "" + +# Context class to mock the context.get_auth_token_or_empty() calls +class Context: + def get_auth_token_or_empty(self) -> str: + return get_auth_token_or_empty() + +context = Context() + +async def create_spreadsheet_tool( + title: str = "Untitled spreadsheet", + data: str | None = None, +) -> Dict[str, Any]: + """Create a new spreadsheet with the provided title and data in its first sheet.""" + logger.info(f"Executing tool: create_spreadsheet with title: {title}") + try: + access_token = get_auth_token() + service = get_sheets_service(access_token) + + try: + sheet_data = SheetDataInput(data=data) # type: ignore[arg-type] + except Exception as e: + msg = "Invalid JSON or unexpected data format for parameter `data`" + raise RetryableToolError( + message=msg, + additional_prompt_content=f"{msg}: {e}", + retry_after_ms=100, + ) + + spreadsheet = Spreadsheet( + properties=SpreadsheetProperties(title=title), + sheets=[create_sheet(sheet_data)], + ) + + body = spreadsheet.model_dump() + + response = ( + service.spreadsheets() + .create(body=body, fields="spreadsheetId,spreadsheetUrl,properties/title") + .execute() + ) + + return { + "title": response["properties"]["title"], + "spreadsheetId": response["spreadsheetId"], + "spreadsheetUrl": response["spreadsheetUrl"], + } + except HttpError as e: + logger.error(f"Google Sheets API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Sheets API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool create_spreadsheet: {e}") + raise e + +async def get_spreadsheet_tool(spreadsheet_id: str) -> Dict[str, Any]: + """Get the user entered values and formatted values for all cells in all sheets in the spreadsheet.""" + logger.info(f"Executing tool: get_spreadsheet with spreadsheet_id: {spreadsheet_id}") + try: + access_token = get_auth_token() + service = get_sheets_service(access_token) + + response = ( + service.spreadsheets() + .get( + spreadsheetId=spreadsheet_id, + includeGridData=True, + fields="spreadsheetId,spreadsheetUrl,properties/title,sheets/properties,sheets/data/rowData/values/userEnteredValue,sheets/data/rowData/values/formattedValue,sheets/data/rowData/values/effectiveValue", + ) + .execute() + ) + return parse_get_spreadsheet_response(response) + except HttpError as e: + logger.error(f"Google Sheets API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Sheets API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_spreadsheet: {e}") + raise e + +async def write_to_cell_tool( + spreadsheet_id: str, + column: str, + row: int, + value: str, + sheet_name: str = "Sheet1", +) -> Dict[str, Any]: + """Write a value to a single cell in a spreadsheet.""" + logger.info(f"Executing tool: write_to_cell with spreadsheet_id: {spreadsheet_id}, cell: {column}{row}") + try: + access_token = get_auth_token() + service = get_sheets_service(access_token) + + validate_write_to_cell_params(service, spreadsheet_id, sheet_name, column, row) + + range_ = f"'{sheet_name}'!{column.upper()}{row}" + body = { + "range": range_, + "majorDimension": "ROWS", + "values": [[value]], + } + + sheet_properties = ( + service.spreadsheets() + .values() + .update( + spreadsheetId=spreadsheet_id, + range=range_, + valueInputOption="USER_ENTERED", + includeValuesInResponse=True, + body=body, + ) + .execute() + ) + + return parse_write_to_cell_response(sheet_properties) + except HttpError as e: + logger.error(f"Google Sheets API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Sheets API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool write_to_cell: {e}") + raise e + +async def list_all_sheets_tool() -> Dict[str, Any]: + """List all Google Sheets spreadsheets in the user's Google Drive.""" + logger.info("Executing tool: list_all_sheets") + try: + access_token = get_auth_token() + service = get_drive_service(access_token) + + # Search for Google Sheets files (mimeType for Google Sheets) + query = "mimeType='application/vnd.google-apps.spreadsheet'" + + results = service.files().list( + q=query, + fields="files(id,name,createdTime,modifiedTime,owners,webViewLink)", + orderBy="modifiedTime desc" + ).execute() + + files = results.get('files', []) + + spreadsheets = [] + for file in files: + spreadsheet_info = { + "id": file.get('id'), + "name": file.get('name'), + "createdTime": file.get('createdTime'), + "modifiedTime": file.get('modifiedTime'), + "webViewLink": file.get('webViewLink'), + "owners": [owner.get('displayName', owner.get('emailAddress', 'Unknown')) + for owner in file.get('owners', [])] + } + spreadsheets.append(spreadsheet_info) + + return { + "spreadsheets": spreadsheets, + "total_count": len(spreadsheets) + } + + except HttpError as e: + logger.error(f"Google Drive API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"Google Drive API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool list_all_sheets: {e}") + raise e + +@click.command() +@click.option("--port", default=GOOGLE_SHEETS_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("google-sheets-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="google_sheets_create_spreadsheet", + description="Create a new spreadsheet with a title and optional data.", + inputSchema={ + "type": "object", + "required": ["title"], + "properties": { + "title": { + "type": "string", + "description": "The title of the new spreadsheet.", + }, + "data": { + "type": "string", + "description": "The data to write to the spreadsheet. A JSON string (property names enclosed in double quotes) representing a dictionary that maps row numbers to dictionaries that map column letters to cell values. For example, data[23]['C'] would be the value of the cell in row 23, column C. Type hint: dict[int, dict[str, Union[int, float, str, bool]]]", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_SHEETS_SPREADSHEET"} + ), + ), + types.Tool( + name="google_sheets_get_spreadsheet", + description="Retrieve spreadsheet properties and cell data for all sheets.", + inputSchema={ + "type": "object", + "required": ["spreadsheet_id"], + "properties": { + "spreadsheet_id": { + "type": "string", + "description": "The ID of the spreadsheet to retrieve.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_SHEETS_SPREADSHEET", "readOnlyHint": True} + ), + ), + types.Tool( + name="google_sheets_write_to_cell", + description="Write a value to a specific cell in a spreadsheet.", + inputSchema={ + "type": "object", + "required": ["spreadsheet_id", "column", "row", "value"], + "properties": { + "spreadsheet_id": { + "type": "string", + "description": "The ID of the spreadsheet to write to.", + }, + "column": { + "type": "string", + "description": "The column string to write to. For example, 'A', 'F', or 'AZ'.", + }, + "row": { + "type": "integer", + "description": "The row number to write to.", + }, + "value": { + "type": "string", + "description": "The value to write to the cell.", + }, + "sheet_name": { + "type": "string", + "description": "The name of the sheet to write to. Defaults to 'Sheet1'.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_SHEETS_CELL"} + ), + ), + types.Tool( + name="google_sheets_list_all_sheets", + description="List all Google Sheets spreadsheets in the user's Google Drive.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations( + **{"category": "GOOGLE_SHEETS_SPREADSHEET", "readOnlyHint": True} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + if name == "google_sheets_create_spreadsheet": + title = arguments.get("title") + data = arguments.get("data") + if not title: + return [ + types.TextContent( + type="text", + text="Error: title parameter is required", + ) + ] + + try: + result = await create_spreadsheet_tool(title, data) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_sheets_get_spreadsheet": + spreadsheet_id = arguments.get("spreadsheet_id") + if not spreadsheet_id: + return [ + types.TextContent( + type="text", + text="Error: spreadsheet_id parameter is required", + ) + ] + + try: + result = await get_spreadsheet_tool(spreadsheet_id) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_sheets_write_to_cell": + spreadsheet_id = arguments.get("spreadsheet_id") + column = arguments.get("column") + row = arguments.get("row") + value = arguments.get("value") + sheet_name = arguments.get("sheet_name", "Sheet1") + + if not all([spreadsheet_id, column, row is not None, value is not None]): + return [ + types.TextContent( + type="text", + text="Error: spreadsheet_id, column, row, and value parameters are required", + ) + ] + + try: + result = await write_to_cell_tool(spreadsheet_id, column, row, value, sheet_name) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "google_sheets_list_all_sheets": + try: + result = await list_all_sheets_tool() + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/google_sheets/utils.py b/mcp_servers/google_sheets/utils.py new file mode 100644 index 00000000..fb0cd863 --- /dev/null +++ b/mcp_servers/google_sheets/utils.py @@ -0,0 +1,543 @@ + +from typing import Any + +from google.oauth2.credentials import Credentials +from googleapiclient.discovery import Resource, build + +from exceptions import RetryableToolError, ToolExecutionError +from models import ( + CellData, + CellExtendedValue, + CellFormat, + CellValue, + GridData, + GridProperties, + NumberFormat, + NumberFormatType, + RowData, + Sheet, + SheetDataInput, + SheetProperties, +) + +DEFAULT_SEARCH_CONTACTS_LIMIT = 30 + +DEFAULT_SHEET_ROW_COUNT = 1000 +DEFAULT_SHEET_COLUMN_COUNT = 26 + +# ---------------------------------------------------------------- +# Sheets utils +# ---------------------------------------------------------------- + +def col_to_index(col: str) -> int: + """Convert a sheet's column string to a 0-indexed column index + + Args: + col (str): The column string to convert. e.g., "A", "AZ", "QED" + + Returns: + int: The 0-indexed column index. + """ + result = 0 + for char in col.upper(): + result = result * 26 + (ord(char) - ord("A") + 1) + return result - 1 + + +def index_to_col(index: int) -> str: + """Convert a 0-indexed column index to its corresponding column string + + Args: + index (int): The 0-indexed column index to convert. + + Returns: + str: The column string. e.g., "A", "AZ", "QED" + """ + result = "" + index += 1 + while index > 0: + index, rem = divmod(index - 1, 26) + result = chr(rem + ord("A")) + result + return result + + +def is_col_greater(col1: str, col2: str) -> bool: + """Determine if col1 represents a column that comes after col2 in a sheet + + This comparison is based on: + 1. The length of the column string (longer means greater). + 2. Lexicographical comparison if both strings are the same length. + + Args: + col1 (str): The first column string to compare. + col2 (str): The second column string to compare. + + Returns: + bool: True if col1 comes after col2, False otherwise. + """ + if len(col1) != len(col2): + return len(col1) > len(col2) + return col1.upper() > col2.upper() + + +def compute_sheet_data_dimensions( + sheet_data_input: SheetDataInput, +) -> tuple[tuple[int, int], tuple[int, int]]: + """ + Compute the dimensions of a sheet based on the data provided. + + Args: + sheet_data_input (SheetDataInput): + The data to compute the dimensions of. + + Returns: + tuple[tuple[int, int], tuple[int, int]]: The dimensions of the sheet. The first tuple + contains the row range (start, end) and the second tuple contains the column range + (start, end). + """ + max_row = 0 + min_row = 10_000_000 # max number of cells in a sheet + max_col_str = None + min_col_str = None + + for key, row in sheet_data_input.data.items(): + try: + row_num = int(key) + except ValueError: + continue + if row_num > max_row: + max_row = row_num + if row_num < min_row: + min_row = row_num + + if isinstance(row, dict): + for col in row: + # Update max column string + if max_col_str is None or is_col_greater(col, max_col_str): + max_col_str = col + # Update min column string + if min_col_str is None or is_col_greater(min_col_str, col): + min_col_str = col + + max_col_index = col_to_index(max_col_str) if max_col_str is not None else -1 + min_col_index = col_to_index(min_col_str) if min_col_str is not None else 0 + + return (min_row, max_row), (min_col_index, max_col_index) + + +def create_sheet(sheet_data_input: SheetDataInput) -> Sheet: + """Create a Google Sheet from a dictionary of data. + + Args: + sheet_data_input (SheetDataInput): The data to create the sheet from. + + Returns: + Sheet: The created sheet. + """ + (_, max_row), (min_col_index, max_col_index) = compute_sheet_data_dimensions(sheet_data_input) + sheet_data = create_sheet_data(sheet_data_input, min_col_index, max_col_index) + sheet_properties = create_sheet_properties( + row_count=max(DEFAULT_SHEET_ROW_COUNT, max_row), + column_count=max(DEFAULT_SHEET_COLUMN_COUNT, max_col_index + 1), + ) + + return Sheet(properties=sheet_properties, data=sheet_data) + + +def create_sheet_properties( + sheet_id: int = 1, + title: str = "Sheet1", + row_count: int = DEFAULT_SHEET_ROW_COUNT, + column_count: int = DEFAULT_SHEET_COLUMN_COUNT, +) -> SheetProperties: + """Create a SheetProperties object + + Args: + sheet_id (int): The ID of the sheet. + title (str): The title of the sheet. + row_count (int): The number of rows in the sheet. + column_count (int): The number of columns in the sheet. + + Returns: + SheetProperties: The created sheet properties object. + """ + return SheetProperties( + sheetId=sheet_id, + title=title, + gridProperties=GridProperties(rowCount=row_count, columnCount=column_count), + ) + + +def group_contiguous_rows(row_numbers: list[int]) -> list[list[int]]: + """Groups a sorted list of row numbers into contiguous groups + + A contiguous group is a list of row numbers that are consecutive integers. + For example, [1,2,3,5,6] is converted to [[1,2,3],[5,6]]. + + Args: + row_numbers (list[int]): The list of row numbers to group. + + Returns: + list[list[int]]: The grouped row numbers. + """ + if not row_numbers: + return [] + groups = [] + current_group = [row_numbers[0]] + for r in row_numbers[1:]: + if r == current_group[-1] + 1: + current_group.append(r) + else: + groups.append(current_group) + current_group = [r] + groups.append(current_group) + return groups + + +def create_cell_data(cell_value: CellValue) -> CellData: + """ + Create a CellData object based on the type of cell_value. + """ + if isinstance(cell_value, bool): + return _create_bool_cell(cell_value) + elif isinstance(cell_value, int): + return _create_int_cell(cell_value) + elif isinstance(cell_value, float): + return _create_float_cell(cell_value) + elif isinstance(cell_value, str): + return _create_string_cell(cell_value) + + +def _create_formula_cell(cell_value: str) -> CellData: + cell_val = CellExtendedValue(formulaValue=cell_value) + return CellData(userEnteredValue=cell_val) + + +def _create_currency_cell(cell_value: str) -> CellData: + value_without_symbol = cell_value[1:] + try: + num_value = int(value_without_symbol) + cell_format = CellFormat( + numberFormat=NumberFormat(type=NumberFormatType.CURRENCY, pattern="$#,##0") + ) + cell_val = CellExtendedValue(numberValue=num_value) + return CellData(userEnteredValue=cell_val, userEnteredFormat=cell_format) + except ValueError: + try: + num_value = float(value_without_symbol) # type: ignore[assignment] + cell_format = CellFormat( + numberFormat=NumberFormat(type=NumberFormatType.CURRENCY, pattern="$#,##0.00") + ) + cell_val = CellExtendedValue(numberValue=num_value) + return CellData(userEnteredValue=cell_val, userEnteredFormat=cell_format) + except ValueError: + return CellData(userEnteredValue=CellExtendedValue(stringValue=cell_value)) + + +def _create_percent_cell(cell_value: str) -> CellData: + try: + num_value = float(cell_value[:-1].strip()) + cell_format = CellFormat( + numberFormat=NumberFormat(type=NumberFormatType.PERCENT, pattern="0.00%") + ) + cell_val = CellExtendedValue(numberValue=num_value) + return CellData(userEnteredValue=cell_val, userEnteredFormat=cell_format) + except ValueError: + return CellData(userEnteredValue=CellExtendedValue(stringValue=cell_value)) + + +def _create_bool_cell(cell_value: bool) -> CellData: + return CellData(userEnteredValue=CellExtendedValue(boolValue=cell_value)) + + +def _create_int_cell(cell_value: int) -> CellData: + cell_format = CellFormat( + numberFormat=NumberFormat(type=NumberFormatType.NUMBER, pattern="#,##0") + ) + return CellData( + userEnteredValue=CellExtendedValue(numberValue=cell_value), userEnteredFormat=cell_format + ) + + +def _create_float_cell(cell_value: float) -> CellData: + cell_format = CellFormat( + numberFormat=NumberFormat(type=NumberFormatType.NUMBER, pattern="#,##0.00") + ) + return CellData( + userEnteredValue=CellExtendedValue(numberValue=cell_value), userEnteredFormat=cell_format + ) + + +def _create_string_cell(cell_value: str) -> CellData: + if cell_value.startswith("="): + return _create_formula_cell(cell_value) + elif cell_value.startswith("$") and len(cell_value) > 1: + return _create_currency_cell(cell_value) + elif cell_value.endswith("%") and len(cell_value) > 1: + return _create_percent_cell(cell_value) + + return CellData(userEnteredValue=CellExtendedValue(stringValue=cell_value)) + + +def create_row_data( + row_data: dict[str, CellValue], min_col_index: int, max_col_index: int +) -> RowData: + """Constructs RowData for a single row using the provided row_data. + + Args: + row_data (dict[str, CellValue]): The data to create the row from. + min_col_index (int): The minimum column index from the SheetDataInput. + max_col_index (int): The maximum column index from the SheetDataInput. + """ + row_cells = [] + for col_idx in range(min_col_index, max_col_index + 1): + col_letter = index_to_col(col_idx) + if col_letter in row_data: + cell_data = create_cell_data(row_data[col_letter]) + else: + cell_data = CellData(userEnteredValue=CellExtendedValue(stringValue="")) + row_cells.append(cell_data) + return RowData(values=row_cells) + + +def create_sheet_data( + sheet_data_input: SheetDataInput, + min_col_index: int, + max_col_index: int, +) -> list[GridData]: + """Create grid data from SheetDataInput by grouping contiguous rows and processing cells. + + Args: + sheet_data_input (SheetDataInput): The data to create the sheet from. + min_col_index (int): The minimum column index from the SheetDataInput. + max_col_index (int): The maximum column index from the SheetDataInput. + + Returns: + list[GridData]: The created grid data. + """ + row_numbers = list(sheet_data_input.data.keys()) + if not row_numbers: + return [] + + sorted_rows = sorted(row_numbers) + groups = group_contiguous_rows(sorted_rows) + + sheet_data = [] + for group in groups: + rows_data = [] + for r in group: + current_row_data = sheet_data_input.data.get(r, {}) + row = create_row_data(current_row_data, min_col_index, max_col_index) + rows_data.append(row) + grid_data = GridData( + startRow=group[0] - 1, # convert to 0-indexed + startColumn=min_col_index, + rowData=rows_data, + ) + sheet_data.append(grid_data) + + return sheet_data + + +def parse_get_spreadsheet_response(api_response: dict) -> dict: + """ + Parse the get spreadsheet Google Sheets API response into a structured dictionary. + """ + properties = api_response.get("properties", {}) + sheets = [parse_sheet(sheet) for sheet in api_response.get("sheets", [])] + + return { + "title": properties.get("title", ""), + "spreadsheetId": api_response.get("spreadsheetId", ""), + "spreadsheetUrl": api_response.get("spreadsheetUrl", ""), + "sheets": sheets, + } + + +def parse_sheet(api_sheet: dict) -> dict: + """ + Parse an individual sheet's data from the Google Sheets 'get spreadsheet' + API response into a structured dictionary. + """ + props = api_sheet.get("properties", {}) + grid_props = props.get("gridProperties", {}) + cell_data = convert_api_grid_data_to_dict(api_sheet.get("data", [])) + + return { + "sheetId": props.get("sheetId"), + "title": props.get("title", ""), + "rowCount": grid_props.get("rowCount", 0), + "columnCount": grid_props.get("columnCount", 0), + "data": cell_data, + } + + +def extract_user_entered_cell_value(cell: dict) -> Any: + """ + Extract the user entered value from a cell's 'userEnteredValue'. + + Args: + cell (dict): A cell dictionary from the grid data. + + Returns: + The extracted value if present, otherwise None. + """ + user_val = cell.get("userEnteredValue", {}) + for key in ["stringValue", "numberValue", "boolValue", "formulaValue"]: + if key in user_val: + return user_val[key] + + return "" + + +def process_row(row: dict, start_column_index: int) -> dict: + """ + Process a single row from grid data, converting non-empty cells into a dictionary + that maps column letters to cell values. + + Args: + row (dict): A row from the grid data. + start_column_index (int): The starting column index for this row. + + Returns: + dict: A mapping of column letters to cell values for non-empty cells. + """ + row_result = {} + for j, cell in enumerate(row.get("values", [])): + column_index = start_column_index + j + column_string = index_to_col(column_index) + user_entered_cell_value = extract_user_entered_cell_value(cell) + formatted_cell_value = cell.get("formattedValue", "") + + if user_entered_cell_value != "" or formatted_cell_value != "": + row_result[column_string] = { + "userEnteredValue": user_entered_cell_value, + "formattedValue": formatted_cell_value, + } + + return row_result + + +def convert_api_grid_data_to_dict(grids: list[dict]) -> dict: + """ + Convert a list of grid data dictionaries from the 'get spreadsheet' API + response into a structured cell dictionary. + + The returned dictionary maps row numbers to sub-dictionaries that map column letters + (e.g., 'A', 'B', etc.) to their corresponding non-empty cell values. + + Args: + grids (list[dict]): The list of grid data dictionaries from the API. + + Returns: + dict: A dictionary mapping row numbers to dictionaries of column letter/value pairs. + Only includes non-empty rows and non-empty cells. + """ + result = {} + for grid in grids: + start_row = grid.get("startRow", 0) + start_column = grid.get("startColumn", 0) + + for i, row in enumerate(grid.get("rowData", []), start=1): + current_row = start_row + i + row_data = process_row(row, start_column) + + if row_data: + result[current_row] = row_data + + return dict(sorted(result.items())) + + +def validate_write_to_cell_params( # type: ignore[no-any-unimported] + service: Resource, + spreadsheet_id: str, + sheet_name: str, + column: str, + row: int, +) -> None: + """Validates the input parameters for the write to cell tool. + + Args: + service (Resource): The Google Sheets service. + spreadsheet_id (str): The ID of the spreadsheet provided to the tool. + sheet_name (str): The name of the sheet provided to the tool. + column (str): The column to write to provided to the tool. + row (int): The row to write to provided to the tool. + + Raises: + RetryableToolError: + If the sheet name is not found in the spreadsheet + ToolExecutionError: + If the column is not alphabetical + If the row is not a positive number + If the row is out of bounds for the sheet + If the column is out of bounds for the sheet + """ + if not column.isalpha(): + raise ToolExecutionError( + message=( + f"Invalid column name {column}. " + "It must be a non-empty string containing only letters" + ), + ) + + if row < 1: + raise ToolExecutionError( + message=(f"Invalid row number {row}. It must be a positive integer greater than 0."), + ) + + sheet_properties = ( + service.spreadsheets() + .get( + spreadsheetId=spreadsheet_id, + includeGridData=True, + fields="sheets/properties/title,sheets/properties/gridProperties/rowCount,sheets/properties/gridProperties/columnCount", + ) + .execute() + ) + + target_sheet = None + for sheet in sheet_properties["sheets"]: + if sheet["properties"]["title"] == sheet_name: + target_sheet = sheet + break + + sheet_names = [sheet["properties"]["title"] for sheet in sheet_properties["sheets"]] + + if target_sheet is None: + raise RetryableToolError( + message=f"Sheet name {sheet_name} not found in spreadsheet with id {spreadsheet_id}", + additional_prompt_content=f"Sheet names in the spreadsheet: {sheet_names}", + retry_after_ms=100, + ) + + sheet_row_count = target_sheet["properties"]["gridProperties"]["rowCount"] + sheet_column_count = target_sheet["properties"]["gridProperties"]["columnCount"] + + if row > sheet_row_count: + raise ToolExecutionError( + message=( + f"Row {row} is out of bounds for sheet {sheet_name} " + f"in spreadsheet with id {spreadsheet_id}. " + f"Sheet only has {sheet_row_count} rows which is less than the requested row {row}" + ) + ) + + if col_to_index(column) > sheet_column_count: + raise ToolExecutionError( + message=( + f"Column {column} is out of bounds for sheet {sheet_name} " + f"in spreadsheet with id {spreadsheet_id}. " + f"Sheet only has {sheet_column_count} columns which " + f"is less than the requested column {column}" + ) + ) + + +def parse_write_to_cell_response(response: dict) -> dict: + return { + "spreadsheetId": response["spreadsheetId"], + "sheetTitle": response["updatedData"]["range"].split("!")[0], + "updatedCell": response["updatedData"]["range"].split("!")[1], + "value": response["updatedData"]["values"][0][0], + } diff --git a/mcp_servers/google_slides/.env.example b/mcp_servers/google_slides/.env.example new file mode 100644 index 00000000..b5e7a797 --- /dev/null +++ b/mcp_servers/google_slides/.env.example @@ -0,0 +1,6 @@ +# Server configuration +GOOGLE_SLIDES_MCP_SERVER_PORT=5000 + +# Google Cloud credentials path (optional) +# If not provided, will look for credentials.json or service-account.json in the current directory +# GOOGLE_APPLICATION_CREDENTIALS=/path/to/credentials.json diff --git a/mcp_servers/google_slides/Dockerfile b/mcp_servers/google_slides/Dockerfile new file mode 100644 index 00000000..9d3b36e8 --- /dev/null +++ b/mcp_servers/google_slides/Dockerfile @@ -0,0 +1,16 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Copy requirements and install dependencies +COPY mcp_servers/google_slides/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the rest of the application +COPY mcp_servers/google_slides/ . + +# Set the default port +ENV GOOGLE_SLIDES_MCP_SERVER_PORT=5000 + +# Command to run +CMD ["python", "server.py"] diff --git a/mcp_servers/google_slides/README.md b/mcp_servers/google_slides/README.md new file mode 100644 index 00000000..9eef0104 --- /dev/null +++ b/mcp_servers/google_slides/README.md @@ -0,0 +1,78 @@ +# Google Slides MCP Server + +A Model Context Protocol (MCP) server for Google Slides integration. Create, edit, and manage presentations using Google Slides API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Google Slides with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("GOOGLE_SLIDES", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/google-slides-mcp-server:latest + + +# Run Google Slides MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/google-slides-mcp-server:latest + + +# Run Google Slides MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_google_access_token_here"}' \ + ghcr.io/klavis-ai/google-slides-mcp-server:latest +``` + +**OAuth Setup:** Google Slides requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Presentation Management**: Create, read, update Google Slides presentations +- **Slide Operations**: Add, remove, and modify slides and layouts +- **Content Editing**: Insert text, images, and shapes into presentations +- **Formatting**: Apply themes, styles, and formatting to slides +- **Collaboration**: Manage sharing and collaborative editing + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/google_slides/requirements.txt b/mcp_servers/google_slides/requirements.txt new file mode 100644 index 00000000..6977af37 --- /dev/null +++ b/mcp_servers/google_slides/requirements.txt @@ -0,0 +1,13 @@ +# Server dependencies +starlette==0.47.2 +uvicorn==0.30.0 +click==8.1.7 +python-dotenv==1.0.1 + +# MCP framework +mcp==1.11.0 + +# Google API dependencies +google-api-python-client==2.116.0 +google-auth-httplib2==0.1.1 +google-auth-oauthlib==1.2.0 diff --git a/mcp_servers/google_slides/server.py b/mcp_servers/google_slides/server.py new file mode 100644 index 00000000..78c0e9f5 --- /dev/null +++ b/mcp_servers/google_slides/server.py @@ -0,0 +1,824 @@ +import os +import base64 +import json +import uuid +import logging +import contextlib +from collections.abc import AsyncIterator +from typing import List, Optional, Dict, Any +from contextvars import ContextVar + +import click +from dotenv import load_dotenv +from googleapiclient.discovery import build +from google.oauth2.credentials import Credentials +from google_auth_oauthlib.flow import InstalledAppFlow +from google.auth.transport.requests import Request as GoogleRequest +from google.oauth2 import service_account +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +load_dotenv() + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("google-slides-mcp-server") + +# Constants +SCOPES = ['https://www.googleapis.com/auth/presentations', 'https://www.googleapis.com/auth/drive.readonly'] +GOOGLE_SLIDES_MCP_SERVER_PORT = int(os.getenv("GOOGLE_SLIDES_MCP_SERVER_PORT", "5000")) + +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("Authentication token not found in request context") + +def get_slides_service(access_token: str): + """Create Google Slides service with access token.""" + credentials = Credentials(token=access_token) + return build('slides', 'v1', credentials=credentials) + +def get_drive_service(access_token: str): + """Create Google Drive service with access token.""" + credentials = Credentials(token=access_token) + return build('drive', 'v3', credentials=credentials) + +def get_credentials(): + """ + Gets Google API credentials from service account or OAuth2 flow. + Returns credentials object for use with Google APIs. + """ + # Try to get token from context first + try: + access_token = get_auth_token() + if access_token: + return Credentials(token=access_token) + except RuntimeError: + pass # No token in context, fall back to other methods + creds = None + # Check if we have service account credentials + if os.path.exists('service-account.json'): + return service_account.Credentials.from_service_account_file( + 'service-account.json', scopes=SCOPES) + + # Check if we have saved credentials + if os.path.exists('token.json'): + creds = Credentials.from_authorized_user_info( + json.loads(open('token.json').read()), SCOPES) + + # If there are no valid credentials, or they're expired + if not creds or not creds.valid: + if creds and creds.expired and creds.refresh_token: + creds.refresh(GoogleRequest()) + else: + # Load client secrets + if os.path.exists('credentials.json'): + flow = InstalledAppFlow.from_client_secrets_file( + 'credentials.json', SCOPES) + creds = flow.run_local_server(port=0) + else: + raise Exception("No credentials found. Please set up credentials.") + + # Save the credentials + with open('token.json', 'w') as token: + token.write(creds.to_json()) + + return creds + +async def create_presentation(title: str) -> str: + """ + Creates a new Google Slides presentation with the specified title. + + Args: + title: The title of the new presentation + + Returns: + A URL to the created presentation + """ + try: + creds = get_credentials() + service = build('slides', 'v1', credentials=creds) + + presentation = { + 'title': title + } + + presentation = service.presentations().create(body=presentation).execute() + presentation_id = presentation.get('presentationId') + + return f"Presentation created: https://docs.google.com/presentation/d/{presentation_id}/edit" + except Exception as e: + logger.error(f"Error creating presentation: {e}") + return f"Error creating presentation: {str(e)}" + +async def add_slide(presentation_id: str, title: Optional[str] = None, content: Optional[str] = None) -> str: + """ + Adds a new slide to an existing presentation. + + Args: + presentation_id: The ID of the presentation to add a slide to + title: Optional title for the slide + content: Optional content for the slide body + + Returns: + A message indicating the result of the operation + """ + try: + creds = get_credentials() + service = build('slides', 'v1', credentials=creds) + + # Create a blank slide + requests = [ + { + 'createSlide': { + 'objectId': str(uuid.uuid4()), + 'insertionIndex': '1', + 'slideLayoutReference': { + 'predefinedLayout': 'TITLE_AND_BODY' + } + } + } + ] + + response = service.presentations().batchUpdate( + presentationId=presentation_id, + body={'requests': requests} + ).execute() + + slide_id = response.get('replies', [{}])[0].get('createSlide', {}).get('objectId') + + # If title or content provided, add them in a second request + if title or content: + content_requests = [] + + if title: + content_requests.append({ + 'insertText': { + 'objectId': slide_id, + 'insertionIndex': 0, + 'text': title + } + }) + + if content: + content_requests.append({ + 'insertText': { + 'objectId': slide_id, + 'insertionIndex': 0, + 'text': content + } + }) + + if content_requests: + service.presentations().batchUpdate( + presentationId=presentation_id, + body={'requests': content_requests} + ).execute() + + return f"Slide added to presentation: https://docs.google.com/presentation/d/{presentation_id}/edit" + except Exception as e: + logger.error(f"Error adding slide: {e}") + return f"Error adding slide: {str(e)}" + +async def list_presentations() -> str: + """ + Lists all available presentations in the user's Google Drive. + + Returns: + A formatted string listing all presentations + """ + try: + creds = get_credentials() + drive_service = build('drive', 'v3', credentials=creds) + + # Query for Google Slides files + results = drive_service.files().list( + q="mimeType='application/vnd.google-apps.presentation'", + pageSize=10, + fields="files(id, name, webViewLink)" + ).execute() + + presentations = results.get('files', []) + + if not presentations: + return "No presentations found." + + result = "Available presentations:\n\n" + for p in presentations: + result += f"- {p.get('name')}: {p.get('webViewLink')}\n" + + return result + except Exception as e: + logger.error(f"Error listing presentations: {e}") + return f"Error listing presentations: {str(e)}" + +async def get_presentation(presentation_id: str, fields: Optional[str] = None) -> str: + """ + Retrieves detailed information about a specific presentation. + + Args: + presentation_id: The ID of the presentation to retrieve + fields: Optional field mask to limit the returned data (e.g., "slides,pageSize") + + Returns: + A formatted string with presentation details + """ + try: + creds = get_credentials() + service = build('slides', 'v1', credentials=creds) + + # Set default fields if none specified + if not fields: + fields = "presentationId,title,revisionId,slides,pageSize" + + # Retrieve the presentation + presentation = service.presentations().get( + presentationId=presentation_id, + fields=fields + ).execute() + + # Format the response + title = presentation.get('title', 'Untitled') + slide_count = len(presentation.get('slides', [])) + revision_id = presentation.get('revisionId', 'Unknown') + page_size = presentation.get('pageSize', {}) + width = page_size.get('width', {}).get('magnitude', 0) + height = page_size.get('height', {}).get('magnitude', 0) + + result = f"Presentation: {title}\n" + result += f"ID: {presentation_id}\n" + result += f"Slides: {slide_count}\n" + result += f"Revision ID: {revision_id}\n" + result += f"Page Size: {width}x{height}\n\n" + + if 'slides' in fields.split(',') and slide_count > 0: + result += "Slide Overview:\n" + for i, slide in enumerate(presentation.get('slides', [])): + slide_id = slide.get('objectId', 'Unknown') + result += f"Slide {i+1} (ID: {slide_id})\n" + + return result + except Exception as e: + logger.error(f"Error retrieving presentation: {e}") + return f"Error retrieving presentation: {str(e)}" + +async def batch_update_presentation(presentation_id: str, requests: List[Dict]) -> str: + """ + Applies a series of updates to a presentation. + This is the primary method for modifying slides (adding text, shapes, images, creating slides, etc.) + + Args: + presentation_id: The ID of the presentation to update + requests: List of request objects defining the updates + + Returns: + A message indicating the result of the operation + """ + try: + creds = get_credentials() + service = build('slides', 'v1', credentials=creds) + + # Execute the batch update + response = service.presentations().batchUpdate( + presentationId=presentation_id, + body={'requests': requests} + ).execute() + + # Format the response + replies = response.get('replies', []) + result = f"Successfully applied {len(replies)} updates to presentation\n" + result += f"Presentation URL: https://docs.google.com/presentation/d/{presentation_id}/edit\n\n" + + # Add information about created slides if any + created_slides = [r.get('createSlide', {}).get('objectId') + for r in replies if 'createSlide' in r] + if created_slides: + result += f"Created {len(created_slides)} new slides with IDs: {', '.join(created_slides)}\n" + + return result + except Exception as e: + logger.error(f"Error updating presentation: {e}") + return f"Error updating presentation: {str(e)}" + +async def summarize_presentation(presentation_id: str, include_notes: bool = False) -> str: + """ + Extracts and formats all text content from a presentation for easier summarization. + + Args: + presentation_id: The ID of the presentation to summarize + include_notes: Whether to include speaker notes in the summary + + Returns: + A formatted string containing the presentation's text content + """ + try: + creds = get_credentials() + service = build('slides', 'v1', credentials=creds) + + # Retrieve the presentation with all text elements + presentation = service.presentations().get( + presentationId=presentation_id + ).execute() + + title = presentation.get('title', 'Untitled') + slides = presentation.get('slides', []) + + result = f"Summary of: {title}\n" + result += f"Total Slides: {len(slides)}\n\n" + + # Process each slide + for i, slide in enumerate(slides): + slide_id = slide.get('objectId', 'Unknown') + result += f"Slide {i+1} (ID: {slide_id}):\n" + + # Extract text from text elements + text_elements = [] + page_elements = slide.get('pageElements', []) + + for element in page_elements: + if 'shape' in element and 'text' in element['shape']: + shape_text = "" + text_runs = element['shape']['text'].get('textElements', []) + + for text_run in text_runs: + if 'textRun' in text_run and 'content' in text_run['textRun']: + shape_text += text_run['textRun']['content'] + + if shape_text.strip(): + text_elements.append(shape_text.strip()) + + # Add the text content + if text_elements: + for text in text_elements: + result += f" {text}\n" + else: + result += " [No text content]\n" + + # Add speaker notes if requested + if include_notes and 'slideProperties' in slide and 'notesPage' in slide['slideProperties']: + notes_page = slide['slideProperties']['notesPage'] + notes_text = "" + + if 'pageElements' in notes_page: + for element in notes_page['pageElements']: + if 'shape' in element and 'text' in element['shape']: + text_runs = element['shape']['text'].get('textElements', []) + + for text_run in text_runs: + if 'textRun' in text_run and 'content' in text_run['textRun']: + notes_text += text_run['textRun']['content'] + + if notes_text.strip(): + result += f" Notes: {notes_text.strip()}\n" + + result += "\n" + + return result + except Exception as e: + logger.error(f"Error summarizing presentation: {e}") + return f"Error summarizing presentation: {str(e)}" + +@click.command() +@click.option("--port", default=GOOGLE_SLIDES_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server( + "google-slides-mcp-server", + instructions="Create and manage Google Slides presentations. You can create new presentations, add slides to existing presentations, and list available presentations.", + ) + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="create_presentation", + description="Create a new Google Slides presentation with the specified title.", + inputSchema={ + "type": "object", + "required": ["title"], + "properties": { + "title": { + "type": "string", + "description": "The title of the new presentation." + } + }, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_SLIDES_PRESENTATION"}), + ), + types.Tool( + name="add_slide", + description="Add a new slide to an existing presentation.", + inputSchema={ + "type": "object", + "required": ["presentation_id"], + "properties": { + "presentation_id": { + "type": "string", + "description": "The ID of the presentation to add a slide to." + }, + "title": { + "type": "string", + "description": "Optional title for the slide." + }, + "content": { + "type": "string", + "description": "Optional content for the slide body." + } + }, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_SLIDES_SLIDE"}), + ), + types.Tool( + name="list_presentations", + description="List all available presentations in the user's Google Drive.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_SLIDES_PRESENTATION", "readOnlyHint": True}), + ), + types.Tool( + name="get_presentation", + description="Retrieves detailed information about a specific presentation.", + inputSchema={ + "type": "object", + "required": ["presentation_id"], + "properties": { + "presentation_id": { + "type": "string", + "description": "The ID of the presentation to retrieve." + }, + "fields": { + "type": "string", + "description": "Optional field mask to limit the returned data (e.g., 'slides,pageSize')." + } + }, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_SLIDES_PRESENTATION", "readOnlyHint": True}), + ), + types.Tool( + name="batch_update_presentation", + description="Applies a series of updates to a presentation. This is the primary method for modifying slides.", + inputSchema={ + "type": "object", + "required": ["presentation_id", "requests"], + "properties": { + "presentation_id": { + "type": "string", + "description": "The ID of the presentation to update." + }, + "requests": { + "type": "array", + "description": "An array of request objects defining the updates. Refer to the Google Slides API batchUpdate documentation." + } + }, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_SLIDES_PRESENTATION"}), + ), + types.Tool( + name="summarize_presentation", + description="Extracts and formats all text content from a presentation for easier summarization.", + inputSchema={ + "type": "object", + "required": ["presentation_id"], + "properties": { + "presentation_id": { + "type": "string", + "description": "The ID of the presentation to summarize." + }, + "include_notes": { + "type": "boolean", + "description": "Whether to include speaker notes in the summary." + } + }, + }, + annotations=types.ToolAnnotations(**{"category": "GOOGLE_SLIDES_PRESENTATION", "readOnlyHint": True}), + ) + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + ctx = app.request_context + + if name == "create_presentation": + title = arguments.get("title") + + if not title: + return [ + types.TextContent( + type="text", + text="Error: 'title' parameter is required" + ) + ] + + try: + result = await create_presentation(title) + return [ + types.TextContent( + type="text", + text=result + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}" + ) + ] + + elif name == "add_slide": + presentation_id = arguments.get("presentation_id") + title = arguments.get("title") + content = arguments.get("content") + + if not presentation_id: + return [ + types.TextContent( + type="text", + text="Error: 'presentation_id' parameter is required" + ) + ] + + try: + result = await add_slide(presentation_id, title, content) + return [ + types.TextContent( + type="text", + text=result + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}" + ) + ] + + elif name == "list_presentations": + try: + result = await list_presentations() + return [ + types.TextContent( + type="text", + text=result + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}" + ) + ] + + elif name == "get_presentation": + presentation_id = arguments.get("presentation_id") + fields = arguments.get("fields") + + if not presentation_id: + return [ + types.TextContent( + type="text", + text="Error: 'presentation_id' parameter is required" + ) + ] + + try: + result = await get_presentation(presentation_id, fields) + return [ + types.TextContent( + type="text", + text=result + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}" + ) + ] + + elif name == "batch_update_presentation": + presentation_id = arguments.get("presentation_id") + requests = arguments.get("requests") + + if not presentation_id: + return [ + types.TextContent( + type="text", + text="Error: 'presentation_id' parameter is required" + ) + ] + + if not requests: + return [ + types.TextContent( + type="text", + text="Error: 'requests' parameter is required" + ) + ] + + try: + result = await batch_update_presentation(presentation_id, requests) + return [ + types.TextContent( + type="text", + text=result + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}" + ) + ] + + elif name == "summarize_presentation": + presentation_id = arguments.get("presentation_id") + include_notes = arguments.get("include_notes", False) + + if not presentation_id: + return [ + types.TextContent( + type="text", + text="Error: 'presentation_id' parameter is required" + ) + ] + + try: + result = await summarize_presentation(presentation_id, include_notes) + return [ + types.TextContent( + type="text", + text=result + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}" + ) + ] + + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}" + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() diff --git a/mcp_servers/hacker_news/Dockerfile b/mcp_servers/hacker_news/Dockerfile new file mode 100644 index 00000000..3c5d7da0 --- /dev/null +++ b/mcp_servers/hacker_news/Dockerfile @@ -0,0 +1,22 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/hacker_news/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy server code and tools +COPY mcp_servers/hacker_news/server.py . +COPY mcp_servers/hacker_news/tools/ ./tools/ + +# Expose port (change if your server uses another) +EXPOSE 5000 + +# Run the server +CMD ["python", "server.py"] diff --git a/mcp_servers/hacker_news/README.md b/mcp_servers/hacker_news/README.md new file mode 100644 index 00000000..4c5de8d5 --- /dev/null +++ b/mcp_servers/hacker_news/README.md @@ -0,0 +1,73 @@ +# Hacker News MCP Server + +A Model Context Protocol (MCP) server for Hacker News integration. Access stories, comments, and user information from Hacker News using their public API. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Hacker News with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("HACKER_NEWS", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/hacker-news-mcp-server:latest + + +# Run Hacker News MCP Server (no authentication required) +docker run -p 5000:5000 \ + ghcr.io/klavis-ai/hacker-news-mcp-server:latest +``` + +**No Authentication:** Hacker News API is public and requires no authentication or API keys. + +## šŸ› ļø Available Tools + +- **Story Access**: Get top stories, new stories, and best stories +- **Comment Retrieval**: Access story comments and discussion threads +- **User Information**: Get user profiles and submission history +- **Search**: Search stories and comments by keywords +- **Live Data**: Access real-time Hacker News content + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/hacker_news/requirements.txt b/mcp_servers/hacker_news/requirements.txt new file mode 100644 index 00000000..fef25453 --- /dev/null +++ b/mcp_servers/hacker_news/requirements.txt @@ -0,0 +1 @@ +mcp==1.11.0 \ No newline at end of file diff --git a/mcp_servers/hacker_news/server.py b/mcp_servers/hacker_news/server.py new file mode 100644 index 00000000..c33b732c --- /dev/null +++ b/mcp_servers/hacker_news/server.py @@ -0,0 +1,494 @@ +import contextlib +import logging +import os +import json +from collections.abc import AsyncIterator + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +from tools import ( + hackerNews_item, + hackerNews_user, + hackerNews_askstories, + hackerNews_jobstories, + hackerNews_showstories, + hackerNews_updates, + hackerNews_topstories, + hackerNews_newstories, + hackerNews_beststories +) + +# Configure logging +logger = logging.getLogger(__name__) + +HACKER_NEWS_MCP_SERVER_PORT = int(os.getenv("HACKER_NEWS_MCP_SERVER_PORT", "5000")) + +@click.command() +@click.option("--port", default=HACKER_NEWS_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("hacker-news-mcp-server") + + #----------------------------------------------------------------------------------------------- + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # For hackerNews_item + types.Tool( + name="hackerNews_item", + description="Fetch a Hacker News item by its numeric item_id.", + inputSchema={ + "type": "object", + "properties": { + "item_id": { + "type": "integer", + "description": "The item's unique identifier (e.g., 8863)" + } + }, + "required": ["item_id"] + }, + annotations=types.ToolAnnotations(**{"category": "HACKER_NEWS_ITEM", "readOnlyHint": True}) + ), + + # For hackerNews_user + types.Tool( + name="hackerNews_user", + description="Fetch a Hacker News user by username.", + inputSchema={ + "type": "object", + "properties": { + "username": { + "type": "string", + "description": "The user's unique username (e.g., 'pg')" + } + }, + "required": ["username"] + }, + annotations=types.ToolAnnotations(**{"category": "HACKER_NEWS_USER", "readOnlyHint": True}) + ), + # For hackerNews_topstories + types.Tool( + name="hackerNews_topstories", + description="Fetch top stories details from Hacker News.", + inputSchema={ + "type": "object", + "properties": { + "count": { + "type": "integer", + "description": "Number of top stories to fetch (default: 5)", + "default": 5 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "HACKER_NEWS_STORY_LIST", "readOnlyHint": True}) + ), + + # For hackerNews_beststories + types.Tool( + name="hackerNews_beststories", + description="Fetch best stories details from Hacker News.", + inputSchema={ + "type": "object", + "properties": { + "count": { + "type": "integer", + "description": "Number of best stories to fetch (default: 5)", + "default": 5 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "HACKER_NEWS_STORY_LIST", "readOnlyHint": True}) + ), + + # For hackerNews_newstories + types.Tool( + name="hackerNews_newstories", + description="Fetch newest stories details from Hacker News.", + inputSchema={ + "type": "object", + "properties": { + "count": { + "type": "integer", + "description": "Number of new stories to fetch (default: 5)", + "default": 5 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "HACKER_NEWS_STORY_LIST", "readOnlyHint": True}) + ), + + # For hackerNews_showstories + types.Tool( + name="hackerNews_showstories", + description="Fetch show stories details from Hacker News.", + inputSchema={ + "type": "object", + "properties": { + "count": { + "type": "integer", + "description": "Number of show stories to fetch (default: 5)", + "default": 5 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "HACKER_NEWS_STORY_LIST", "readOnlyHint": True}) + ), + + # For hackerNews_askstories + types.Tool( + name="hackerNews_askstories", + description="Fetch ask stories details from Hacker News.", + inputSchema={ + "type": "object", + "properties": { + "count": { + "type": "integer", + "description": "Number of ask stories to fetch (default: 5)", + "default": 5 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "HACKER_NEWS_STORY_LIST", "readOnlyHint": True}) + ), + + # For hackerNews_jobstories + types.Tool( + name="hackerNews_jobstories", + description="Fetch job stories details from Hacker News.", + inputSchema={ + "type": "object", + "properties": { + "count": { + "type": "integer", + "description": "Number of job stories to fetch (default: 5)", + "default": 5 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "HACKER_NEWS_STORY_LIST", "readOnlyHint": True}) + ), + + # For hackerNews_updates + types.Tool( + name="hackerNews_updates", + description="Fetch recent updates including items and profiles from Hacker News.", + inputSchema={ + "type": "object", + "properties": { + "count": { + "type": "integer", + "description": "Number of updates to fetch (default: 5)", + "default": 5 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "HACKER_NEWS_UPDATES", "readOnlyHint": True}) + ) + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + # Hacker News Item + if name == "hackerNews_item": + try: + item_id = arguments.get("item_id") + if not item_id: + return [ + types.TextContent( + type="text", + text="Missing required parameter: item_id", + ) + ] + + result = await hackerNews_item(item_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing hackerNews_item: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Hacker News User + elif name == "hackerNews_user": + try: + username = arguments.get("username") + if not username: + return [ + types.TextContent( + type="text", + text="Missing required parameter: username", + ) + ] + + result = await hackerNews_user(username) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing hackerNews_user: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + # Hacker News Top Stories + elif name == "hackerNews_topstories": + try: + count = arguments.get("count", 5) + result = await hackerNews_topstories(count) + return [ + types.TextContent( + type="text", + text=result if isinstance(result, str) else json.dumps(result, indent=2) + ) + ] + except Exception as e: + logger.exception(f"Error executing hackerNews_topstories: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Hacker News Best Stories + elif name == "hackerNews_beststories": + try: + count = arguments.get("count", 5) + result = await hackerNews_beststories(count) + return [ + types.TextContent( + type="text", + text=result if isinstance(result, str) else json.dumps(result, indent=2) + ) + ] + except Exception as e: + logger.exception(f"Error executing hackerNews_beststories: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Hacker News New Stories + elif name == "hackerNews_newstories": + try: + count = arguments.get("count", 5) + result = await hackerNews_newstories(count) + return [ + types.TextContent( + type="text", + text=result if isinstance(result, str) else json.dumps(result, indent=2) + ) + ] + except Exception as e: + logger.exception(f"Error executing hackerNews_newstories: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Hacker News Show Stories + elif name == "hackerNews_showstories": + try: + count = arguments.get("count", 5) + result = await hackerNews_showstories(count) + return [ + types.TextContent( + type="text", + text=result if isinstance(result, str) else json.dumps(result, indent=2) + ) + ] + except Exception as e: + logger.exception(f"Error executing hackerNews_showstories: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Hacker News Ask Stories + elif name == "hackerNews_askstories": + try: + count = arguments.get("count", 5) + result = await hackerNews_askstories(count) + return [ + types.TextContent( + type="text", + text=result if isinstance(result, str) else json.dumps(result, indent=2) + ) + ] + except Exception as e: + logger.exception(f"Error executing hackerNews_askstories: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Hacker News Job Stories + elif name == "hackerNews_jobstories": + try: + count = arguments.get("count", 5) + result = await hackerNews_jobstories(count) + return [ + types.TextContent( + type="text", + text=result if isinstance(result, str) else json.dumps(result, indent=2) + ) + ] + except Exception as e: + logger.exception(f"Error executing hackerNews_jobstories: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Hacker News Updates + elif name == "hackerNews_updates": + try: + count = arguments.get("count", 5) + result = await hackerNews_updates(count) + return [ + types.TextContent( + type="text", + text=result if isinstance(result, str) else json.dumps(result, indent=2) + ) + ] + except Exception as e: + logger.exception(f"Error executing hackerNews_updates: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + + + #----------------------------------------------------------------------------------------------- + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + await session_manager.handle_request(scope, receive, send) + + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/hacker_news/tools/__init__.py b/mcp_servers/hacker_news/tools/__init__.py new file mode 100644 index 00000000..5f9b57b3 --- /dev/null +++ b/mcp_servers/hacker_news/tools/__init__.py @@ -0,0 +1,24 @@ +from .hackerNews_mcp_tools import ( + hackerNews_item, + hackerNews_user, + hackerNews_askstories, + hackerNews_jobstories, + hackerNews_showstories, + hackerNews_updates, + hackerNews_topstories, + hackerNews_newstories, + hackerNews_beststories +) + +__all__ = [ + "hackerNews_item", + "hackerNews_user", + + "hackerNews_askstories", + "hackerNews_jobstories", + "hackerNews_showstories", + "hackerNews_updates", + "hackerNews_topstories", + "hackerNews_newstories", + "hackerNews_beststories" +] diff --git a/mcp_servers/hacker_news/tools/hackerNews_mcp_tools.py b/mcp_servers/hacker_news/tools/hackerNews_mcp_tools.py new file mode 100644 index 00000000..0ff914ab --- /dev/null +++ b/mcp_servers/hacker_news/tools/hackerNews_mcp_tools.py @@ -0,0 +1,234 @@ +import json +import httpx +import logging +from .helpers import ( + hackerNews_showstories_ids, + hackerNews_beststories_ids, + hackerNews_topstories_ids, + hackerNews_newstories_ids, + hackerNews_jobstories_ids, + hackerNews_askstories_ids, + hackerNews_updates_ids +) + +# Setup logging +logger = logging.getLogger(__name__) +logging.basicConfig(level=logging.INFO) + +base_url = 'https://hacker-news.firebaseio.com/v0' + +async def hackerNews_item(item_id: int) -> dict: + """Fetch a Hacker News item by its numeric item_id. + + Args: + item_id (int): Required. The item's unique item_identifier (e.g., `8863`). + """ + url = f"{base_url}/item/{item_id}.json" + logger.info(f"Requesting item {item_id} from {url}") + try: + # Use an async client to make a non-blocking request + async with httpx.AsyncClient() as client: + response = await client.get(url) + response.raise_for_status() # Raise an exception for 4xx/5xx responses + logger.info(f"Successfully fetched item {item_id}") + return response.json() # This returns a dict + except httpx.RequestError as e: + logger.error(f"Could not get item {item_id}: {e}") + return {"error": f"An error occurred while requesting {e.request.url!r}."} + except Exception as e: + logger.error(f"Unexpected error when fetching item {item_id}: {e}") + return json.dumps({"error": str(e)}) + +async def hackerNews_user(username: str) -> dict: + """Fetch a Hacker News user by username. + + Args: + username (str): Required. The user's unique username (e.g., `'pg'`). + """ + url = f"{base_url}/user/{username}.json" + logger.info(f"Requesting user {username} from {url}") + try: + # Use an async client to make a non-blocking request + async with httpx.AsyncClient() as client: + response = await client.get(url) + response.raise_for_status() # Raise an exception for 4xx/5xx responses + logger.info(f"Successfully fetched item {username}") + return response.json() # This returns a dict + except httpx.RequestError as e: + logger.error(f"Could not get username {username}: {e}") + return {"error": f"An error occurred while requesting {e.request.url!r}."} + except Exception as e: + logger.error(f"Unexpected error when fetching user {username}: {e}") + return json.dumps({"error": str(e)}) + + +async def hackerNews_topstories(count:int=5) -> dict: + """ + Fetch top stories details. + + Required: + count: Number of top stories to fetch. + + Returns: + str: JSON string of stories or error. + """ + try: + ids = await hackerNews_topstories_ids() + if 'error' in ids: + return json.dumps(ids) + news = [] + for item_id in ids[:count]: + item = await hackerNews_item(item_id) + news.append(item) + return json.dumps(news) + except Exception as e: + logger.error(f"Error in hackerNews_topstories: {e}") + return json.dumps({"error": str(e)}) + + +async def hackerNews_beststories(count:int=5) -> dict: + """ + Fetch best stories details. + + Required: + count: Number of best stories to fetch. + + Returns: + str: JSON string of stories or error. + """ + try: + ids = await hackerNews_beststories_ids() + if 'error' in ids: + return json.dumps(ids) + news = [] + for item_id in ids[:count]: + item = await hackerNews_item(item_id) + news.append(item) + return json.dumps(news) + except Exception as e: + logger.error(f"Error in hackerNews_beststories: {e}") + return json.dumps({"error": str(e)}) + + +async def hackerNews_newstories(count:int=5) -> dict: + """ + Fetch new stories details. + + Required: + count: Number of new stories to fetch. + + Returns: + str: JSON string of stories or error. + """ + try: + ids = await hackerNews_newstories_ids() + if 'error' in ids: + return json.dumps(ids) + news = [] + for item_id in ids[:count]: + item = await hackerNews_item(item_id) + news.append(item) + return json.dumps(news) + except Exception as e: + logger.error(f"Error in hackerNews_newstories: {e}") + return json.dumps({"error": str(e)}) + + +async def hackerNews_showstories(count:int=5) -> dict: + """ + Fetch show stories details. + + Required: + count: Number of show stories to fetch. + + Returns: + str: JSON string of stories or error. + """ + try: + ids = await hackerNews_showstories_ids() + if 'error' in ids: + return json.dumps(ids) + news = [] + for item_id in ids[:count]: + item = await hackerNews_item(item_id) + news.append(item) + return json.dumps(news) + except Exception as e: + logger.error(f"Error in hackerNews_showstories: {e}") + return json.dumps({"error": str(e)}) + + +async def hackerNews_askstories(count:int=5) -> dict: + """ + Fetch ask stories details. + + Required: + count: Number of ask stories to fetch. + + Returns: + str: JSON string of stories or error. + """ + try: + ids = await hackerNews_askstories_ids() + if 'error' in ids: + return json.dumps(ids) + news = [] + for item_id in ids[:count]: + item = await hackerNews_item(item_id) + news.append(item) + return json.dumps(news) + except Exception as e: + logger.error(f"Error in hackerNews_askstories: {e}") + return json.dumps({"error": str(e)}) + + +async def hackerNews_jobstories(count:int=5) -> dict: + """ + Fetch job stories details. + + Required: + count: Number of job stories to fetch. + + Returns: + str: JSON string of stories or error. + """ + try: + ids = await hackerNews_jobstories_ids() + if 'error' in ids: + return json.dumps(ids) + news = [] + for item_id in ids[:count]: + item = await hackerNews_item(item_id) + news.append(item) + return json.dumps(news) + except Exception as e: + logger.error(f"Error in hackerNews_jobstories: {e}") + return json.dumps({"error": str(e)}) + + +async def hackerNews_updates(count:int=5) -> dict: + """ + Fetch updates: items and profiles. + + Required: + count: Number of updates to fetch. + + Returns: + str: JSON string with items and profiles or error. + """ + try: + ids = await hackerNews_updates_ids() + if 'error' in ids: + return json.dumps(ids) + news = [] + profiles = ids.get('profiles', [])[:count] + for item_id in ids.get('items', [])[:count]: + item = await hackerNews_item(item_id) + news.append(item) + return json.dumps({ + "items": news, + "profiles": profiles + }) + except Exception as e: + logger.error(f"Error in hackerNews_updates: {e}") + return json.dumps({"error": str(e)}) diff --git a/mcp_servers/hacker_news/tools/helpers.py b/mcp_servers/hacker_news/tools/helpers.py new file mode 100644 index 00000000..c3bcce45 --- /dev/null +++ b/mcp_servers/hacker_news/tools/helpers.py @@ -0,0 +1,204 @@ +import httpx +import logging +import json + +# Configure logging +logger = logging.getLogger(__name__) +logging.basicConfig(level=logging.INFO) + +base_url = 'https://hacker-news.firebaseio.com/v0' +async def hackerNews_item(item_id: int) -> dict: + """Fetch a Hacker News item by its numeric item_id. + + Args: + item_id (int): Required. The item's unique item_identifier (e.g., `8863`). + """ + url = f"{base_url}/item/{item_id}.json" + logger.info(f"Requesting item {item_id} from {url}") + try: + # Use an async client to make a non-blocking request + async with httpx.AsyncClient() as client: + response = await client.get(url) + response.raise_for_status() # Raise an exception for 4xx/5xx responses + logger.info(f"Successfully fetched item {item_id}") + return response.json() # This returns a dict + except httpx.RequestError as e: + logger.error(f"Could not get item {item_id}: {e}") + return {"error": f"An error occurred while requesting {e.request.url!r}."} + except Exception as e: + logger.error(f"Unexpected error when fetching item {item_id}: {e}") + return json.dumps({"error": str(e)}) + +async def hackerNews_topstories_ids() -> list: + """ + Fetch top story IDs from Hacker News. + + Returns: + list: List of IDs on success, + or {"error": "..."} on failure. + """ + url = f"{base_url}/topstories.json" + logger.info(f"Requesting topstories from {url}") + try: + # Use an async client to make a non-blocking request + async with httpx.AsyncClient() as client: + response = await client.get(url) + response.raise_for_status() # Raise an exception for 4xx/5xx responses + logger.info(f"Successfully fetched item") + return response.json() # This returns a dict + except httpx.RequestError as e: + logger.error(f"Could not get topstories: {e}") + return {"error": str(e)} + except Exception as e: + logger.error(f"Unexpected error when fetching topstories: {e}") + return {"error": str(e)} + + +async def hackerNews_newstories_ids() -> list: + """ + Fetch new story IDs from Hacker News. + + Returns: + list: List of IDs on success, + or {"error": "..."} on failure. + """ + url = f"{base_url}/newstories.json" + logger.info(f"Requesting newstories from {url}") + try: + # Use an async client to make a non-blocking request + async with httpx.AsyncClient() as client: + response = await client.get(url) + response.raise_for_status() # Raise an exception for 4xx/5xx responses + logger.info(f"Successfully fetched item") + return response.json() # This returns a dict + except httpx.RequestError as e: + logger.error(f"Could not get newstories: {e}") + return {"error": str(e)} + except Exception as e: + logger.error(f"Unexpected error when fetching newstories: {e}") + return {"error": str(e)} + + +async def hackerNews_askstories_ids() -> list: + """ + Fetch ask story IDs from Hacker News. + + Returns: + list: List of IDs on success, + or {"error": "..."} on failure. + """ + url = f"{base_url}/askstories.json" + logger.info(f"Requesting askstories from {url}") + try: + # Use an async client to make a non-blocking request + async with httpx.AsyncClient() as client: + response = await client.get(url) + response.raise_for_status() # Raise an exception for 4xx/5xx responses + logger.info(f"Successfully fetched item") + return response.json() # This returns a dict + except httpx.RequestError as e: + logger.error(f"Could not get askstories: {e}") + return {"error": str(e)} + except Exception as e: + logger.error(f"Unexpected error when fetching askstories: {e}") + return {"error": str(e)} + + +async def hackerNews_showstories_ids() -> list: + """ + Fetch show story IDs from Hacker News. + + Returns: + list: List of IDs on success, + or {"error": "..."} on failure. + """ + url = f"{base_url}/showstories.json" + logger.info(f"Requesting showstories from {url}") + try: + # Use an async client to make a non-blocking request + async with httpx.AsyncClient() as client: + response = await client.get(url) + response.raise_for_status() # Raise an exception for 4xx/5xx responses + logger.info(f"Successfully fetched item") + return response.json() # This returns a dict + except httpx.RequestError as e: + logger.error(f"Could not get showstories: {e}") + return {"error": str(e)} + except Exception as e: + logger.error(f"Unexpected error when fetching showstories: {e}") + return {"error": str(e)} + + +async def hackerNews_jobstories_ids() -> list: + """ + Fetch job story IDs from Hacker News. + + Returns: + list: List of IDs on success, + or {"error": "..."} on failure. + """ + url = f"{base_url}/jobstories.json" + logger.info(f"Requesting jobstories from {url}") + try: + # Use an async client to make a non-blocking request + async with httpx.AsyncClient() as client: + response = await client.get(url) + response.raise_for_status() # Raise an exception for 4xx/5xx responses + logger.info(f"Successfully fetched item") + return response.json() # This returns a dict + except httpx.RequestError as e: + logger.error(f"Could not get jobstories: {e}") + return {"error": str(e)} + except Exception as e: + logger.error(f"Unexpected error when fetching jobstories: {e}") + return {"error": str(e)} + + +async def hackerNews_updates_ids() -> dict: + """ + Fetch updates (new items and profiles) from Hacker News. + + Returns: + dict: Update JSON on success, + or {"error": "..."} on failure. + """ + url = f"{base_url}/updates.json" + logger.info(f"Requesting updates from {url}") + try: + # Use an async client to make a non-blocking request + async with httpx.AsyncClient() as client: + response = await client.get(url) + response.raise_for_status() # Raise an exception for 4xx/5xx responses + logger.info(f"Successfully fetched item") + return response.json() # This returns a dict + except httpx.RequestError as e: + logger.error(f"Could not get updates: {e}") + return {"error": str(e)} + except Exception as e: + logger.error(f"Unexpected error when fetching updates: {e}") + return {"error": str(e)} + + +async def hackerNews_beststories_ids() -> list: + """ + Fetch best story IDs from Hacker News. + + Returns: + list: List of IDs on success, + or {"error": "..."} on failure. + """ + url = f"{base_url}/beststories.json" + logger.info(f"Requesting beststories from {url}") + try: + # Use an async client to make a non-blocking request + async with httpx.AsyncClient() as client: + response = await client.get(url) + response.raise_for_status() # Raise an exception for 4xx/5xx responses + logger.info(f"Successfully fetched item") + return response.json() # This returns a dict + except httpx.RequestError as e: + logger.error(f"Could not get beststories: {e}") + return {"error": str(e)} + except Exception as e: + logger.error(f"Unexpected error when fetching beststories: {e}") + return {"error": str(e)} diff --git a/mcp_servers/heygen/.env.example b/mcp_servers/heygen/.env.example new file mode 100644 index 00000000..7eb9bcce --- /dev/null +++ b/mcp_servers/heygen/.env.example @@ -0,0 +1,6 @@ +# HeyGen API Key +# Get your API key from: https://app.heygen.com/space/settings/api +HEYGEN_API_KEY=your_heygen_api_key_here + +# HeyGen MCP Server Port (optional, defaults to 5000) +HEYGEN_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/heygen/Dockerfile b/mcp_servers/heygen/Dockerfile new file mode 100644 index 00000000..2100982e --- /dev/null +++ b/mcp_servers/heygen/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/hubspot/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/heygen/server.py . +COPY mcp_servers/heygen/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/heygen/README.md b/mcp_servers/heygen/README.md new file mode 100644 index 00000000..65b70873 --- /dev/null +++ b/mcp_servers/heygen/README.md @@ -0,0 +1,73 @@ +# HeyGen MCP Server + +A Model Context Protocol (MCP) server for HeyGen integration. Create AI-generated videos and avatars using HeyGen's API. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to HeyGen with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("HEYGEN", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/heygen-mcp-server:latest + + +# Run HeyGen MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/heygen-mcp-server:latest +``` + +**API Key Setup:** Get your HeyGen API key from the [HeyGen Dashboard](https://app.heygen.com/). + +## šŸ› ļø Available Tools + +- **Video Generation**: Create AI-generated videos with avatars +- **Avatar Management**: Access and manage AI avatar templates +- **Voice Synthesis**: Generate speech with AI voices +- **Template Operations**: Use and customize video templates +- **Rendering Control**: Monitor video rendering and processing status + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/heygen/requirements.txt b/mcp_servers/heygen/requirements.txt new file mode 100644 index 00000000..2bc97c2b --- /dev/null +++ b/mcp_servers/heygen/requirements.txt @@ -0,0 +1,6 @@ +click>=8.0.0 +httpx>=0.25.0 +mcp==1.11.0 +python-dotenv>=1.0.0 +starlette>=0.32.0 +uvicorn>=0.24.0 \ No newline at end of file diff --git a/mcp_servers/heygen/server.py b/mcp_servers/heygen/server.py new file mode 100644 index 00000000..4d73c97b --- /dev/null +++ b/mcp_servers/heygen/server.py @@ -0,0 +1,605 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + auth_token_context, + heygen_get_remaining_credits, + heygen_get_voices, heygen_get_voice_locales, heygen_get_avatar_groups, heygen_get_avatars_in_avatar_group, heygen_list_avatars, + heygen_generate_avatar_video, heygen_get_avatar_video_status, + heygen_list_videos, heygen_delete_video +) + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +HEYGEN_MCP_SERVER_PORT = int(os.getenv("HEYGEN_MCP_SERVER_PORT", "5000")) + +def extract_api_key(request_or_scope) -> str: + """Extract API key from headers or environment.""" + api_key = os.getenv("API_KEY") + + if not api_key: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + else: + auth_data = None + + if auth_data: + try: + # Parse the JSON auth data to extract token + auth_json = json.loads(auth_data) + api_key = auth_json.get('token') or auth_json.get('api_key') or '' + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + api_key = "" + + return api_key or "" + +@click.command() +@click.option("--port", default=HEYGEN_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("heygen-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # Account Management + types.Tool( + name="heygen_get_remaining_credits", + description="Retrieve the remaining credits in your HeyGen account.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations(**{"category": "HEYGEN_ACCOUNT", "readOnlyHint": True}), + ), + # Assets - Voices + types.Tool( + name="heygen_get_voices", + description="Retrieve a list of available voices from the HeyGen API.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of voices to return (default: 20, max: 100).", + "default": 20, + "maximum": 100, + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "HEYGEN_VOICE", "readOnlyHint": True}), + ), + types.Tool( + name="heygen_get_voice_locales", + description="Retrieve a list of available voice locales (languages) from the HeyGen API.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of voice locales to return (default: 20).", + "default": 20, + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "HEYGEN_VOICE", "readOnlyHint": True}), + ), + # Assets - Avatars + types.Tool( + name="heygen_get_avatar_groups", + description="Retrieve a list of HeyGen avatar groups.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations(**{"category": "HEYGEN_AVATAR", "readOnlyHint": True}), + ), + types.Tool( + name="heygen_get_avatars_in_avatar_group", + description="Retrieve a list of avatars in a specific HeyGen avatar group.", + inputSchema={ + "type": "object", + "required": ["group_id"], + "properties": { + "group_id": { + "type": "string", + "description": "The ID of the avatar group.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "HEYGEN_AVATAR", "readOnlyHint": True}), + ), + types.Tool( + name="heygen_list_avatars", + description="Retrieve a list of all available avatars from the HeyGen API. This includes your instant avatars and public avatars.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of avatars to return (default: 20).", + "default": 20, + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "HEYGEN_AVATAR", "readOnlyHint": True}), + ), + # Generation + types.Tool( + name="heygen_generate_avatar_video", + description="Generate a new avatar video with the specified avatar, text, and voice.", + inputSchema={ + "type": "object", + "required": ["avatar_id", "text", "voice_id"], + "properties": { + "avatar_id": { + "type": "string", + "description": "The ID of the avatar to use.", + }, + "text": { + "type": "string", + "description": "The text for the avatar to speak (max 1500 characters).", + "maxLength": 1500, + }, + "voice_id": { + "type": "string", + "description": "The ID of the voice to use.", + }, + "background_color": { + "type": "string", + "description": "Background color in hex format (default: #ffffff).", + "default": "#ffffff", + }, + "width": { + "type": "integer", + "description": "Video width in pixels (default: 1280).", + "default": 1280, + }, + "height": { + "type": "integer", + "description": "Video height in pixels (default: 720).", + "default": 720, + }, + "avatar_style": { + "type": "string", + "description": "Avatar style (default: normal).", + "default": "normal", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "HEYGEN_VIDEO"}), + ), + types.Tool( + name="heygen_get_avatar_video_status", + description="Retrieve the status of a video generated via the HeyGen API.", + inputSchema={ + "type": "object", + "required": ["video_id"], + "properties": { + "video_id": { + "type": "string", + "description": "The ID of the video to check status for.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "HEYGEN_VIDEO", "readOnlyHint": True}), + ), + # Management + types.Tool( + name="heygen_list_videos", + description="Retrieve a list of videos from your HeyGen account.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of videos to return (default: 20).", + "default": 20, + }, + "offset": { + "type": "integer", + "description": "Number of videos to skip (default: 0).", + "default": 0, + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "HEYGEN_VIDEO", "readOnlyHint": True}), + ), + types.Tool( + name="heygen_delete_video", + description="Delete a video from your HeyGen account.", + inputSchema={ + "type": "object", + "required": ["video_id"], + "properties": { + "video_id": { + "type": "string", + "description": "The ID of the video to delete.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "HEYGEN_VIDEO"}), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + # Account Management + if name == "heygen_get_remaining_credits": + try: + result = await heygen_get_remaining_credits() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Assets - Voices + elif name == "heygen_get_voices": + limit = arguments.get("limit", 20) + + try: + result = await heygen_get_voices(limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "heygen_get_voice_locales": + limit = arguments.get("limit", 20) + + try: + result = await heygen_get_voice_locales(limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Assets - Avatars + elif name == "heygen_get_avatar_groups": + try: + result = await heygen_get_avatar_groups() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "heygen_get_avatars_in_avatar_group": + group_id = arguments.get("group_id") + if not group_id: + return [ + types.TextContent( + type="text", + text="Error: group_id parameter is required", + ) + ] + + try: + result = await heygen_get_avatars_in_avatar_group(group_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "heygen_list_avatars": + limit = arguments.get("limit", 20) + + try: + result = await heygen_list_avatars(limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Generation + elif name == "heygen_generate_avatar_video": + avatar_id = arguments.get("avatar_id") + text = arguments.get("text") + voice_id = arguments.get("voice_id") + + if not avatar_id or not text or not voice_id: + return [ + types.TextContent( + type="text", + text="Error: avatar_id, text, and voice_id parameters are required", + ) + ] + + background_color = arguments.get("background_color", "#ffffff") + width = arguments.get("width", 1280) + height = arguments.get("height", 720) + avatar_style = arguments.get("avatar_style", "normal") + + try: + result = await heygen_generate_avatar_video( + avatar_id, text, voice_id, background_color, width, height, avatar_style + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "heygen_get_avatar_video_status": + video_id = arguments.get("video_id") + if not video_id: + return [ + types.TextContent( + type="text", + text="Error: video_id parameter is required", + ) + ] + + try: + result = await heygen_get_avatar_video_status(video_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Management + elif name == "heygen_list_videos": + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + + try: + result = await heygen_list_videos(limit, offset) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "heygen_delete_video": + video_id = arguments.get("video_id") + if not video_id: + return [ + types.TextContent( + type="text", + text="Error: video_id parameter is required", + ) + ] + + try: + result = await heygen_delete_video(video_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + else: + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Create transport managers + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + sse = SseServerTransport("/messages", app) + + async def handle_sse(request): + async def sse_handler(scope: Scope, receive: Receive, send: Send) -> None: + # Extract auth token from headers (allow None - will be handled at tool level) + auth_token = extract_api_key(request) + + # Set the auth token in context for this request (can be None/empty) + token = auth_token_context.set(auth_token or "") + try: + await sse.handle_sse(scope, receive, send) + finally: + auth_token_context.reset(token) + + return Response(content="", media_type="text/event-stream", headers={ + "Cache-Control": "no-cache", + "Connection": "keep-alive", + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Headers": "*", + "Access-Control-Allow-Methods": "*", + }) + + async def handle_streamable_http(scope: Scope, receive: Receive, send: Send) -> None: + # Extract auth token from headers (allow None - will be handled at tool level) + auth_token = extract_api_key(scope) + + # Set the auth token in context for this request (can be None/empty) + token = auth_token_context.set(auth_token or "") + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/heygen/tools/__init__.py b/mcp_servers/heygen/tools/__init__.py new file mode 100644 index 00000000..710e0291 --- /dev/null +++ b/mcp_servers/heygen/tools/__init__.py @@ -0,0 +1,35 @@ +""" +HeyGen MCP Server Tools Module +""" + +from .base import auth_token_context +from .account import heygen_get_remaining_credits +from .assets import ( + heygen_get_voices, + heygen_get_voice_locales, + heygen_get_avatar_groups, + heygen_get_avatars_in_avatar_group, + heygen_list_avatars +) +from .generation import ( + heygen_generate_avatar_video, + heygen_get_avatar_video_status +) +from .management import ( + heygen_list_videos, + heygen_delete_video +) + +__all__ = [ + "auth_token_context", + "heygen_get_remaining_credits", + "heygen_get_voices", + "heygen_get_voice_locales", + "heygen_get_avatar_groups", + "heygen_get_avatars_in_avatar_group", + "heygen_list_avatars", + "heygen_generate_avatar_video", + "heygen_get_avatar_video_status", + "heygen_list_videos", + "heygen_delete_video" +] \ No newline at end of file diff --git a/mcp_servers/heygen/tools/account.py b/mcp_servers/heygen/tools/account.py new file mode 100644 index 00000000..222d6eba --- /dev/null +++ b/mcp_servers/heygen/tools/account.py @@ -0,0 +1,23 @@ +""" +Account management tools for HeyGen API. +""" + +from typing import Dict, Any +from .base import make_request + +async def heygen_get_remaining_credits() -> Dict[str, Any]: + """ + Retrieve the remaining credits in your HeyGen account. + + Returns: + Dict containing remaining credits information + """ + response = await make_request("GET", "/v2/user/remaining_quota") + + # Convert quota to credits (quota Ć· 60 = credits) + if "data" in response and "quota" in response["data"]: + quota = response["data"]["quota"] + credits = quota / 60 + response["data"]["credits"] = round(credits, 2) + + return response \ No newline at end of file diff --git a/mcp_servers/heygen/tools/assets.py b/mcp_servers/heygen/tools/assets.py new file mode 100644 index 00000000..53e9e143 --- /dev/null +++ b/mcp_servers/heygen/tools/assets.py @@ -0,0 +1,95 @@ +""" +Asset management tools for HeyGen API (voices, avatars, avatar groups). +""" + +from typing import Dict, Any, Optional +from .base import make_request + +async def heygen_get_voices(limit: Optional[int] = 20) -> Dict[str, Any]: + """ + Retrieve a list of available voices from the HeyGen API. + + Args: + limit: Maximum number of voices to return (default: 20) + + Returns: + Dict containing list of available voices + """ + response = await make_request("GET", "/v2/voices") + + # Limit the response to prevent context overflow + if "data" in response and "voices" in response["data"] and limit is not None: + original_count = len(response["data"]["voices"]) + if original_count > limit: + response["data"]["voices"] = response["data"]["voices"][:limit] + response["data"]["note"] = f"Showing first {limit} of {original_count} voices. Increase limit parameter to see more." + + return response + +async def heygen_get_voice_locales(limit: Optional[int] = 20) -> Dict[str, Any]: + """ + Retrieve a list of available voice locales (languages) from the HeyGen API. + + Args: + limit: Maximum number of voice locales to return (default: 20) + + Returns: + Dict containing list of available voice locales + """ + response = await make_request("GET", "/v2/voices/locales") + + # Limit the response to prevent context overflow + if "data" in response and "locales" in response["data"] and limit is not None: + original_count = len(response["data"]["locales"]) + if original_count > limit: + response["data"]["locales"] = response["data"]["locales"][:limit] + response["data"]["note"] = f"Showing first {limit} of {original_count} voice locales. Increase limit parameter to see more." + + return response + +async def heygen_get_avatar_groups() -> Dict[str, Any]: + """ + Retrieve a list of HeyGen avatar groups. + + Returns: + Dict containing list of avatar groups + """ + return await make_request("GET", "/v2/avatar_group.list") + +async def heygen_get_avatars_in_avatar_group(group_id: str) -> Dict[str, Any]: + """ + Retrieve a list of avatars in a specific HeyGen avatar group. + + Args: + group_id: The ID of the avatar group + + Returns: + Dict containing list of avatars in the specified group + """ + return await make_request("GET", f"/v2/avatar_groups/{group_id}/avatars") + +async def heygen_list_avatars(limit: Optional[int] = 20) -> Dict[str, Any]: + """ + Retrieve a list of all available avatars from the HeyGen API. + This includes your instant avatars and public avatars. + + Args: + limit: Maximum number of avatars to return (default: 20) + + Returns: + Dict containing list of available avatars + """ + response = await make_request("GET", "/v2/avatars") + + # Remove talking_photos array to reduce response size (always remove regardless of limit) + if "data" in response and "talking_photos" in response["data"]: + del response["data"]["talking_photos"] + + # Limit the response to prevent context overflow + if "data" in response and "avatars" in response["data"] and limit is not None: + original_count = len(response["data"]["avatars"]) + if original_count > limit: + response["data"]["avatars"] = response["data"]["avatars"][:limit] + response["data"]["note"] = f"Showing first {limit} of {original_count} avatars. Increase limit parameter to see more." + + return response \ No newline at end of file diff --git a/mcp_servers/heygen/tools/base.py b/mcp_servers/heygen/tools/base.py new file mode 100644 index 00000000..6f61cd0d --- /dev/null +++ b/mcp_servers/heygen/tools/base.py @@ -0,0 +1,93 @@ +""" +Base HTTP client for HeyGen API with authentication and error handling. +""" + +import os +import httpx +from typing import Dict, Any, Optional +from contextvars import ContextVar + +auth_token_context: ContextVar[Optional[str]] = ContextVar('auth_token', default=None) + +HEYGEN_API_ENDPOINT = "https://api.heygen.com/" + +def get_auth_token() -> str: + """Get the HeyGen API token from context or environment variable.""" + try: + token = auth_token_context.get() + if not token: + token = os.getenv("HEYGEN_API_KEY") + if not token: + raise RuntimeError("No HeyGen API key found in context or environment") + return token + except LookupError: + token = os.getenv("HEYGEN_API_KEY") + if not token: + raise RuntimeError("No HeyGen API key found in context or environment") + return token + +def get_headers() -> Dict[str, str]: + """Get standard headers for HeyGen API requests.""" + return { + "X-Api-Key": get_auth_token(), + "Content-Type": "application/json", + "Accept": "application/json" + } + +async def make_request( + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """ + Make an HTTP request to the HeyGen API. + + Args: + method: HTTP method (GET, POST, DELETE, etc.) + endpoint: API endpoint (without base URL) + data: Request body data for POST/PUT requests + params: Query parameters + + Returns: + Dict containing the API response + + Raises: + RuntimeError: If the API request fails + """ + url = f"{HEYGEN_API_ENDPOINT}{endpoint}" + headers = get_headers() + + async with httpx.AsyncClient(timeout=60.0) as client: + try: + if method.upper() == "GET": + response = await client.get(url, headers=headers, params=params) + elif method.upper() == "POST": + response = await client.post(url, headers=headers, json=data, params=params) + elif method.upper() == "DELETE": + response = await client.delete(url, headers=headers, params=params) + else: + raise ValueError(f"Unsupported HTTP method: {method}") + + response.raise_for_status() + + # Some endpoints may return empty response (like delete) + if response.status_code == 204 or not response.content: + return {"success": True} + + return response.json() + + except httpx.HTTPStatusError as e: + error_detail = "Unknown error" + try: + error_response = e.response.json() + if isinstance(error_response, dict): + error_detail = error_response.get("message", error_response.get("error", str(error_response))) + else: + error_detail = str(error_response) + except: + error_detail = e.response.text if hasattr(e.response, 'text') else str(e) + + raise RuntimeError(f"HeyGen API error ({e.response.status_code}): {error_detail}") + except Exception as e: + raise RuntimeError(f"HeyGen API request failed: {str(e)}") \ No newline at end of file diff --git a/mcp_servers/heygen/tools/generation.py b/mcp_servers/heygen/tools/generation.py new file mode 100644 index 00000000..79713858 --- /dev/null +++ b/mcp_servers/heygen/tools/generation.py @@ -0,0 +1,72 @@ +""" +Video generation tools for HeyGen API. +""" + +from typing import Dict, Any, Optional +from .base import make_request + +async def heygen_generate_avatar_video( + avatar_id: str, + text: str, + voice_id: str, + background_color: Optional[str] = "#ffffff", + width: Optional[int] = 1280, + height: Optional[int] = 720, + avatar_style: Optional[str] = "normal" +) -> Dict[str, Any]: + """ + Generate a new avatar video with the specified avatar, text, and voice. + + Args: + avatar_id: The ID of the avatar to use + text: The text for the avatar to speak (max 1500 characters) + voice_id: The ID of the voice to use + background_color: Background color in hex format (default: #ffffff) + width: Video width in pixels (default: 1280) + height: Video height in pixels (default: 720) + avatar_style: Avatar style (default: normal) + + Returns: + Dict containing video generation response with video_id + """ + if len(text) > 1500: + raise ValueError("Text input must be less than 1500 characters") + + video_data = { + "video_inputs": [ + { + "character": { + "type": "avatar", + "avatar_id": avatar_id, + "avatar_style": avatar_style + }, + "voice": { + "type": "text", + "input_text": text, + "voice_id": voice_id + }, + "background": { + "type": "color", + "value": background_color + } + } + ], + "dimension": { + "width": width, + "height": height + } + } + + return await make_request("POST", "/v2/video/generate", data=video_data) + +async def heygen_get_avatar_video_status(video_id: str) -> Dict[str, Any]: + """ + Retrieve the status of a video generated via the HeyGen API. + + Args: + video_id: The ID of the video to check status for + + Returns: + Dict containing video status information + """ + return await make_request("GET", "/v1/video_status.get", params={"video_id": video_id}) \ No newline at end of file diff --git a/mcp_servers/heygen/tools/management.py b/mcp_servers/heygen/tools/management.py new file mode 100644 index 00000000..bc4dbdb5 --- /dev/null +++ b/mcp_servers/heygen/tools/management.py @@ -0,0 +1,38 @@ +""" +Video management tools for HeyGen API. +""" + +from typing import Dict, Any, Optional +from .base import make_request + +async def heygen_list_videos( + limit: Optional[int] = 20, + offset: Optional[int] = 0 +) -> Dict[str, Any]: + """ + Retrieve a list of videos from your HeyGen account. + + Args: + limit: Maximum number of videos to return (default: 20) + offset: Number of videos to skip (default: 0) + + Returns: + Dict containing list of videos + """ + params = { + "limit": limit, + "offset": offset + } + return await make_request("GET", "/v1/video.list", params=params) + +async def heygen_delete_video(video_id: str) -> Dict[str, Any]: + """ + Delete a video from your HeyGen account. + + Args: + video_id: The ID of the video to delete + + Returns: + Dict containing deletion confirmation + """ + return await make_request("DELETE", "/v1/video.delete", params={"video_id": video_id}) \ No newline at end of file diff --git a/mcp_servers/hubspot/.env.example b/mcp_servers/hubspot/.env.example new file mode 100644 index 00000000..2a7fd772 --- /dev/null +++ b/mcp_servers/hubspot/.env.example @@ -0,0 +1,2 @@ +HUBSPOT_ACCESS_TOKEN=HUBSPOT_ACCESS_TOKEN +HUBSPOT_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/hubspot/Dockerfile b/mcp_servers/hubspot/Dockerfile new file mode 100644 index 00000000..91adf6fd --- /dev/null +++ b/mcp_servers/hubspot/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/hubspot/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/hubspot/server.py . +COPY mcp_servers/hubspot/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/hubspot/README.md b/mcp_servers/hubspot/README.md new file mode 100644 index 00000000..dd9c9da6 --- /dev/null +++ b/mcp_servers/hubspot/README.md @@ -0,0 +1,78 @@ +# HubSpot MCP Server + +A Model Context Protocol (MCP) server for HubSpot CRM integration. Manage contacts, deals, companies, and marketing campaigns using HubSpot's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to HubSpot with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("HUBSPOT", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/hubspot-mcp-server:latest + + +# Run HubSpot MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/hubspot-mcp-server:latest + + +# Run HubSpot MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_hubspot_access_token_here"}' \ + ghcr.io/klavis-ai/hubspot-mcp-server:latest +``` + +**OAuth Setup:** HubSpot requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Contact Management**: Create, read, update contacts and customer data +- **Deal Operations**: Manage sales deals and pipeline stages +- **Company Management**: Handle company records and relationships +- **Marketing Tools**: Manage campaigns and marketing automation +- **Custom Properties**: Work with custom fields and properties + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/hubspot/requirements.txt b/mcp_servers/hubspot/requirements.txt new file mode 100644 index 00000000..2a704ad2 --- /dev/null +++ b/mcp_servers/hubspot/requirements.txt @@ -0,0 +1,35 @@ +annotated-types==0.7.0 +anyio==4.9.0 +certifi==2025.6.15 +charset-normalizer==3.4.2 +click==8.2.1 +fastapi>=0.115.12 +h11==0.16.0 +httpcore==1.0.9 +httpx==0.28.1 +httpx-sse==0.4.0 +hubspot-api-client==12.0.0 +idna==3.10 +markdown-it-py==3.0.0 +mcp==1.11.0 +mdurl==0.1.2 +protobuf==6.31.1 +pydantic==2.11.7 +pydantic-settings==2.9.1 +pydantic_core==2.33.2 +Pygments==2.19.1 +python-dateutil==2.9.0.post0 +python-dotenv==1.1.0 +python-multipart==0.0.20 +requests==2.32.4 +rich==14.0.0 +shellingham==1.5.4 +six==1.17.0 +sniffio==1.3.1 +sse-starlette==2.3.6 +starlette==0.47.2 +typer==0.16.0 +typing-inspection==0.4.1 +typing_extensions==4.14.0 +urllib3==2.5.0 +uvicorn==0.34.3 diff --git a/mcp_servers/hubspot/server.py b/mcp_servers/hubspot/server.py new file mode 100644 index 00000000..c594665a --- /dev/null +++ b/mcp_servers/hubspot/server.py @@ -0,0 +1,1858 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + auth_token_context, + # Properties + hubspot_list_properties, + hubspot_search_by_property, + hubspot_create_property, + # Contacts + hubspot_get_contacts, + hubspot_get_contact_by_id, + hubspot_create_contact, + hubspot_update_contact_by_id, + hubspot_delete_contact_by_id, + # Companies + hubspot_get_companies, + hubspot_get_company_by_id, + hubspot_create_companies, + hubspot_update_company_by_id, + hubspot_delete_company_by_id, + # Deals + hubspot_get_deals, + hubspot_get_deal_by_id, + hubspot_create_deal, + hubspot_update_deal_by_id, + hubspot_delete_deal_by_id, + # Tickets + hubspot_get_tickets, + hubspot_get_ticket_by_id, + hubspot_create_ticket, + hubspot_update_ticket_by_id, + hubspot_delete_ticket_by_id, + # Notes + hubspot_create_note, + # Tasks + hubspot_get_tasks, + hubspot_get_task_by_id, + hubspot_create_task, + hubspot_update_task_by_id, + hubspot_delete_task_by_id, + # Associations + hubspot_create_association, + hubspot_delete_association, + hubspot_get_associations, + hubspot_batch_create_associations, +) + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +HUBSPOT_MCP_SERVER_PORT = int(os.getenv("HUBSPOT_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +@click.command() +@click.option("--port", default=HUBSPOT_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("hubspot-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="hubspot_list_properties", + description="List all property metadata for a HubSpot object type like contacts, companies, deals, or tickets.", + inputSchema={ + "type": "object", + "properties": { + "object_type": { + "type": "string", + "description": "The HubSpot object type. One of 'contacts', 'companies', 'deals', or 'tickets'.", + "enum": ["contacts", "companies", "deals", "tickets"] + } + }, + "required": ["object_type"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_PROPERTY", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_get_tasks", + description="Fetch a list of tasks from HubSpot.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Number of tasks to retrieve. Defaults to 10.", + "default": 10, + "minimum": 1 + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_TASK", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_get_task_by_id", + description="Get a specific task by HubSpot task ID.", + inputSchema={ + "type": "object", + "properties": { + "task_id": { + "type": "string", + "description": "The HubSpot task ID." + } + }, + "required": ["task_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_TASK", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_create_task", + description="Create a new task using a JSON string of properties.", + inputSchema={ + "type": "object", + "properties": { + "properties": { + "type": "string", + "description": ( + "JSON string of task properties. Required: 'hs_timestamp' (ms since epoch). " + "Optional: 'hs_task_subject', 'hs_task_body', 'hubspot_owner_id', 'hs_task_type' (CALL | EMAIL | TODO | LINKEDIN_MESSAGE), " + "'hs_task_status' (NOT_STARTED | IN_PROGRESS | WAITING | COMPLETED | DEFERRED), 'hs_task_priority' (LOW | MEDIUM | HIGH)." + ) + } + }, + "required": ["properties"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_TASK"} + ) + ), + types.Tool( + name="hubspot_update_task_by_id", + description="Update an existing task using a JSON string of updated properties.", + inputSchema={ + "type": "object", + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to update." + }, + "updates": { + "type": "string", + "description": ( + "JSON string of the properties to update (e.g., hs_task_subject, hs_task_body, hubspot_owner_id, hs_task_type, " + "hs_task_status, hs_task_priority, hs_timestamp)." + ) + } + }, + "required": ["task_id", "updates"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_TASK"} + ) + ), + types.Tool( + name="hubspot_delete_task_by_id", + description="Delete a task from HubSpot by task ID.", + inputSchema={ + "type": "object", + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to delete." + } + }, + "required": ["task_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_TASK"} + ) + ), + types.Tool( + name="hubspot_search_by_property", + description="Search HubSpot objects by a specific property and value using a filter operator.", + inputSchema={ + "type": "object", + "properties": { + "object_type": { + "type": "string", + "description": "The object type to search (contacts, companies, deals, tickets)." + }, + "property_name": { + "type": "string", + "description": "The property name to filter by." + }, + "operator": { + "type": "string", + "description": """Filter operator + Supported operators (with expected value format and behavior): + + - EQ (Equal): Matches records where the property exactly equals the given value. + Example: "lifecyclestage" EQ "customer" + + - NEQ (Not Equal): Matches records where the property does not equal the given value. + Example: "country" NEQ "India" + + - GT (Greater Than): Matches records where the property is greater than the given value. + Example: "numberofemployees" GT "100" + + - GTE (Greater Than or Equal): Matches records where the property is greater than or equal to the given value. + Example: "revenue" GTE "50000" + + - LT (Less Than): Matches records where the property is less than the given value. + Example: "score" LT "75" + + - LTE (Less Than or Equal): Matches records where the property is less than or equal to the given value. + Example: "createdate" LTE "2023-01-01T00:00:00Z" + + - BETWEEN: Matches records where the property is within a specified range. + Value must be a list of two values [start, end]. + Example: "createdate" BETWEEN ["2023-01-01T00:00:00Z", "2023-12-31T23:59:59Z"] + + - IN: Matches records where the property is one of the values in the list. + Value must be a list. + Example: "industry" IN ["Technology", "Healthcare"] + + - NOT_IN: Matches records where the property is none of the values in the list. + Value must be a list. + Example: "state" NOT_IN ["CA", "NY"] + + - CONTAINS_TOKEN: Matches records where the property contains the given word/token (case-insensitive). + Example: "notes" CONTAINS_TOKEN "demo" + + - NOT_CONTAINS_TOKEN: Matches records where the property does NOT contain the given word/token. + Example: "comments" NOT_CONTAINS_TOKEN "urgent" + + - STARTS_WITH: Matches records where the property value starts with the given substring. + Example: "firstname" STARTS_WITH "Jo" + + - ENDS_WITH: Matches records where the property value ends with the given substring. + Example: "email" ENDS_WITH "@gmail.com" + + - ON_OR_AFTER: For datetime fields, matches records where the date is the same or after the given value. + Example: "createdate" ON_OR_AFTER "2024-01-01T00:00:00Z" + + - ON_OR_BEFORE: For datetime fields, matches records where the date is the same or before the given value. + Example: "closedate" ON_OR_BEFORE "2024-12-31T23:59:59Z" + + Value type rules: + - If the operator expects a list (e.g., IN, BETWEEN), pass value as a JSON-encoded string list: '["a", "b"]' + - All other operators expect a single string (even for numbers or dates)""" + }, + "value": { + "type": "string", + "description": "The value to match against the property." + }, + "properties": { + "type": "array", + "items": {"type": "string"}, + "description": "List of properties to return in the result." + }, + "limit": { + "type": "integer", + "default": 10, + "description": "Maximum number of results to return." + } + }, + "required": ["object_type", "property_name", "operator", "value", "properties"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_PROPERTY", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_get_contacts", + description="Fetch a list of contacts from HubSpot.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Number of contacts to retrieve. Defaults to 10.", + "default": 10, + "minimum": 1 + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_CONTACT", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_get_contact_by_id", + description="Get a specific contact by HubSpot contact ID.", + inputSchema={ + "type": "object", + "properties": { + "contact_id": { + "type": "string", + "description": "The HubSpot contact ID." + } + }, + "required": ["contact_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_CONTACT", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_create_property", + description="Create a new custom property for HubSpot contacts.", + inputSchema={ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Internal name of the property." + }, + "label": { + "type": "string", + "description": "Label shown in the HubSpot UI." + }, + "description": { + "type": "string", + "description": "Description of the property." + }, + "object_type": { + "type": "string", + "description": "Type of the property, 'contacts', 'companies', 'deals' or 'tickets'" + }, + }, + "required": ["name", "label", "description", "object_type"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_PROPERTY"} + ) + ), + types.Tool( + name="hubspot_delete_contact_by_id", + description="Delete a contact from HubSpot by contact ID.", + inputSchema={ + "type": "object", + "properties": { + "contact_id": { + "type": "string", + "description": "The HubSpot contact ID to delete." + } + }, + "required": ["contact_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_CONTACT"} + ) + ), + types.Tool( + name="hubspot_create_contact", + description="Create a new contact using a JSON string of properties.", + inputSchema={ + "type": "object", + "properties": { + "properties": { + "type": "string", + "description": "JSON string containing contact fields and values." + } + }, + "required": ["properties"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_CONTACT"} + ) + ), + types.Tool( + name="hubspot_update_contact_by_id", + description="Update a contact in HubSpot by contact ID using JSON property updates.", + inputSchema={ + "type": "object", + "properties": { + "contact_id": { + "type": "string", + "description": "HubSpot contact ID to update." + }, + "updates": { + "type": "string", + "description": "JSON string with fields to update." + } + }, + "required": ["contact_id", "updates"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_CONTACT"} + ) + ), + types.Tool( + name="hubspot_create_companies", + description="Create a new company using a JSON string of fields.", + inputSchema={ + "type": "object", + "properties": { + "properties": { + "type": "string", + "description": "JSON string containing company fields and values." + } + }, + "required": ["properties"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_COMPANY"} + ) + ), + types.Tool( + name="hubspot_get_companies", + description="Fetch a list of companies from HubSpot.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Number of companies to retrieve. Defaults to 10.", + "default": 10, + "minimum": 1 + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_COMPANY", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_get_company_by_id", + description="Get a company from HubSpot by company ID.", + inputSchema={ + "type": "object", + "properties": { + "company_id": { + "type": "string", + "description": "The HubSpot company ID." + } + }, + "required": ["company_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_COMPANY", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_update_company_by_id", + description="Update an existing company by ID using JSON property updates.", + inputSchema={ + "type": "object", + "properties": { + "company_id": { + "type": "string", + "description": "The HubSpot company ID to update." + }, + "updates": { + "type": "string", + "description": "JSON string with fields to update." + } + }, + "required": ["company_id", "updates"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_COMPANY"} + ) + ), + types.Tool( + name="hubspot_delete_company_by_id", + description="Delete a company from HubSpot by company ID.", + inputSchema={ + "type": "object", + "properties": { + "company_id": { + "type": "string", + "description": "The HubSpot company ID to delete." + } + }, + "required": ["company_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_COMPANY"} + ) + ), + types.Tool( + name="hubspot_get_deals", + description="Fetch a list of deals from HubSpot.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Number of deals to retrieve. Defaults to 10.", + "default": 10, + "minimum": 1 + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_DEAL", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_get_deal_by_id", + description="Fetch a deal by its ID.", + inputSchema={ + "type": "object", + "properties": { + "deal_id": { + "type": "string", + "description": "The HubSpot deal ID." + } + }, + "required": ["deal_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_DEAL", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_create_deal", + description="Create a new deal using a JSON string of properties.", + inputSchema={ + "type": "object", + "properties": { + "properties": { + "type": "string", + "description": "JSON string with fields to create the deal." + } + }, + "required": ["properties"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_DEAL"} + ) + ), + types.Tool( + name="hubspot_update_deal_by_id", + description="Update an existing deal using a JSON string of updated properties.", + inputSchema={ + "type": "object", + "properties": { + "deal_id": { + "type": "string", + "description": "The ID of the deal to update." + }, + "updates": { + "type": "string", + "description": "JSON string of the properties to update." + } + }, + "required": ["deal_id", "updates"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_DEAL"} + ) + ), + types.Tool( + name="hubspot_delete_deal_by_id", + description="Delete a deal from HubSpot by deal ID.", + inputSchema={ + "type": "object", + "properties": { + "deal_id": { + "type": "string", + "description": "The ID of the deal to delete." + } + }, + "required": ["deal_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_DEAL"} + ) + ), + types.Tool( + name="hubspot_get_tickets", + description="Fetch a list of tickets from HubSpot.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Number of tickets to retrieve. Defaults to 10.", + "default": 10, + "minimum": 1 + } + } + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_TICKET", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_get_ticket_by_id", + description="Fetch a ticket by its ID.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "string", + "description": "The HubSpot ticket ID." + } + }, + "required": ["ticket_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_TICKET", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_create_ticket", + description="Create a new ticket using a JSON string of properties.", + inputSchema={ + "type": "object", + "properties": { + "properties": { + "type": "string", + "description": "JSON string with fields to create the ticket." + } + }, + "required": ["properties"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_TICKET"} + ) + ), + types.Tool( + name="hubspot_update_ticket_by_id", + description="Update an existing ticket using a JSON string of updated properties.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "string", + "description": "The ID of the ticket to update." + }, + "updates": { + "type": "string", + "description": "JSON string of the properties to update." + } + }, + "required": ["ticket_id", "updates"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_TICKET"} + ) + ), + types.Tool( + name="hubspot_delete_ticket_by_id", + description="Delete a ticket from HubSpot by ticket ID.", + inputSchema={ + "type": "object", + "properties": { + "ticket_id": { + "type": "string", + "description": "The ID of the ticket to delete." + } + }, + "required": ["ticket_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_TICKET"} + ) + ), + types.Tool( + name="hubspot_create_note", + description="Create a new note in HubSpot with optional associations to contacts, companies, deals, or tickets.", + inputSchema={ + "type": "object", + "properties": { + "note_body": { + "type": "string", + "description": "The content of the note." + }, + "contact_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of contact IDs to associate with the note." + }, + "company_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of company IDs to associate with the note." + }, + "deal_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of deal IDs to associate with the note." + }, + "ticket_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of ticket IDs to associate with the note." + }, + "owner_id": { + "type": "string", + "description": "HubSpot user ID of the note owner." + }, + "timestamp": { + "type": "string", + "description": "ISO 8601 timestamp or milliseconds since epoch for when the note was created (defaults to current time if not provided)." + } + }, + "required": ["note_body"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_NOTE"} + ) + ), + types.Tool( + name="hubspot_create_association", + description="Create an association (link) between two HubSpot objects. For example, link a deal to a contact, or a deal to a company.", + inputSchema={ + "type": "object", + "properties": { + "from_object_type": { + "type": "string", + "description": "The object type to associate from (contacts, companies, deals, tickets).", + "enum": ["contacts", "companies", "deals", "tickets"] + }, + "from_object_id": { + "type": "string", + "description": "The ID of the source object." + }, + "to_object_type": { + "type": "string", + "description": "The object type to associate to (contacts, companies, deals, tickets).", + "enum": ["contacts", "companies", "deals", "tickets"] + }, + "to_object_id": { + "type": "string", + "description": "The ID of the target object." + }, + "association_type_id": { + "type": "integer", + "description": "Optional custom association type ID. If not provided, uses the default association type for the object pair." + } + }, + "required": ["from_object_type", "from_object_id", "to_object_type", "to_object_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_ASSOCIATION"} + ) + ), + types.Tool( + name="hubspot_delete_association", + description="Remove an association (unlink) between two HubSpot objects.", + inputSchema={ + "type": "object", + "properties": { + "from_object_type": { + "type": "string", + "description": "The object type to disassociate from (contacts, companies, deals, tickets).", + "enum": ["contacts", "companies", "deals", "tickets"] + }, + "from_object_id": { + "type": "string", + "description": "The ID of the source object." + }, + "to_object_type": { + "type": "string", + "description": "The object type to disassociate from (contacts, companies, deals, tickets).", + "enum": ["contacts", "companies", "deals", "tickets"] + }, + "to_object_id": { + "type": "string", + "description": "The ID of the target object." + }, + "association_type_id": { + "type": "integer", + "description": "Optional custom association type ID. If not provided, uses the default association type for the object pair." + } + }, + "required": ["from_object_type", "from_object_id", "to_object_type", "to_object_id"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_ASSOCIATION"} + ) + ), + types.Tool( + name="hubspot_get_associations", + description="Get all associations of a specific type for an object. For example, get all contacts associated with a deal.", + inputSchema={ + "type": "object", + "properties": { + "from_object_type": { + "type": "string", + "description": "The source object type (contacts, companies, deals, tickets).", + "enum": ["contacts", "companies", "deals", "tickets"] + }, + "from_object_id": { + "type": "string", + "description": "The ID of the source object." + }, + "to_object_type": { + "type": "string", + "description": "The type of objects to get associations for (contacts, companies, deals, tickets).", + "enum": ["contacts", "companies", "deals", "tickets"] + } + }, + "required": ["from_object_type", "from_object_id", "to_object_type"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_ASSOCIATION", "readOnlyHint": True} + ) + ), + types.Tool( + name="hubspot_batch_create_associations", + description="Create multiple associations at once (batch operation). For example, link a deal to multiple contacts at once.", + inputSchema={ + "type": "object", + "properties": { + "from_object_type": { + "type": "string", + "description": "The object type to associate from (contacts, companies, deals, tickets).", + "enum": ["contacts", "companies", "deals", "tickets"] + }, + "from_object_id": { + "type": "string", + "description": "The ID of the source object." + }, + "to_object_type": { + "type": "string", + "description": "The object type to associate to (contacts, companies, deals, tickets).", + "enum": ["contacts", "companies", "deals", "tickets"] + }, + "to_object_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of target object IDs to associate with." + }, + "association_type_id": { + "type": "integer", + "description": "Optional custom association type ID. If not provided, uses the default association type for the object pair." + } + }, + "required": ["from_object_type", "from_object_id", "to_object_type", "to_object_ids"] + }, + annotations=types.ToolAnnotations( + **{"category": "HUBSPOT_ASSOCIATION"} + ) + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + # Properties + if name == "hubspot_list_properties": + try: + object_type = arguments.get("object_type") + result = await hubspot_list_properties(object_type) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_search_by_property": + object_type = arguments.get("object_type") + property_name = arguments.get("property_name") + operator = arguments.get("operator") + value = arguments.get("value") + properties = arguments.get("properties", []) + limit = arguments.get("limit", 10) + + if not all([object_type, property_name, operator, value, properties]): + return [ + types.TextContent( + type="text", + text="Missing required parameters. Required: object_type, property_name, operator, value, properties.", + ) + ] + + try: + result = await hubspot_search_by_property( + object_type, property_name, operator, value, properties, limit + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_create_property": + try: + result = await hubspot_create_property( + name=arguments["name"], + label=arguments["label"], + description=arguments["description"], + object_type=arguments["object_type"] + ) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Contacts + elif name == "hubspot_get_contacts": + try: + limit = arguments.get("limit", 10) + result = await hubspot_get_contacts(limit) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_get_contact_by_id": + contact_id = arguments.get("contact_id") + if not contact_id: + return [ + types.TextContent( + type="text", + text="Error: contact_id parameter is required", + ) + ] + try: + result = await hubspot_get_contact_by_id(contact_id) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_create_contact": + try: + result = await hubspot_create_contact(arguments["properties"]) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_update_contact_by_id": + contact_id = arguments.get("contact_id") + updates = arguments.get("updates") + if not contact_id or not updates: + return [ + types.TextContent( + type="text", + text="Error: contact_id and updates parameters are required", + ) + ] + try: + result = await hubspot_update_contact_by_id( + contact_id=contact_id, + updates=updates + ) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_delete_contact_by_id": + contact_id = arguments.get("contact_id") + if not contact_id: + return [ + types.TextContent( + type="text", + text="Error: contact_id parameter is required", + ) + ] + try: + result = await hubspot_delete_contact_by_id(contact_id) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Companies + elif name == "hubspot_get_companies": + try: + limit = arguments.get("limit", 10) + result = await hubspot_get_companies(limit) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_get_company_by_id": + company_id = arguments.get("company_id") + if not company_id: + return [ + types.TextContent( + type="text", + text="Error: company_id parameter is required", + ) + ] + try: + result = await hubspot_get_company_by_id(company_id) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_create_companies": + try: + result = await hubspot_create_companies(arguments["properties"]) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_update_company_by_id": + company_id = arguments.get("company_id") + updates = arguments.get("updates") + if not company_id or not updates: + return [ + types.TextContent( + type="text", + text="Error: company_id and updates parameters are required", + ) + ] + try: + result = await hubspot_update_company_by_id( + company_id=company_id, + updates=updates + ) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_delete_company_by_id": + company_id = arguments.get("company_id") + if not company_id: + return [ + types.TextContent( + type="text", + text="Error: company_id parameter is required", + ) + ] + try: + result = await hubspot_delete_company_by_id(company_id) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Deals + elif name == "hubspot_get_deals": + try: + limit = arguments.get("limit", 10) + result = await hubspot_get_deals(limit) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_get_deal_by_id": + deal_id = arguments.get("deal_id") + if not deal_id: + return [ + types.TextContent( + type="text", + text="Error: deal_id parameter is required", + ) + ] + try: + result = await hubspot_get_deal_by_id(deal_id) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_create_deal": + try: + result = await hubspot_create_deal(arguments["properties"]) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_update_deal_by_id": + deal_id = arguments.get("deal_id") + updates = arguments.get("updates") + if not deal_id or not updates: + return [ + types.TextContent( + type="text", + text="Error: deal_id and updates parameters are required", + ) + ] + try: + result = await hubspot_update_deal_by_id( + deal_id=deal_id, + updates=updates + ) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_delete_deal_by_id": + deal_id = arguments.get("deal_id") + if not deal_id: + return [ + types.TextContent( + type="text", + text="Error: deal_id parameter is required", + ) + ] + try: + result = await hubspot_delete_deal_by_id(deal_id) + return [ + types.TextContent( + type="text", + text="Deleted", + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Tickets + elif name == "hubspot_get_tickets": + try: + limit = arguments.get("limit", 10) + result = await hubspot_get_tickets(limit) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_get_ticket_by_id": + ticket_id = arguments.get("ticket_id") + if not ticket_id: + return [ + types.TextContent( + type="text", + text="Error: ticket_id parameter is required", + ) + ] + try: + result = await hubspot_get_ticket_by_id(ticket_id) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_create_ticket": + try: + result = await hubspot_create_ticket(arguments["properties"]) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_update_ticket_by_id": + ticket_id = arguments.get("ticket_id") + updates = arguments.get("updates") + if not ticket_id or not updates: + return [ + types.TextContent( + type="text", + text="Error: ticket_id and updates parameters are required", + ) + ] + try: + result = await hubspot_update_ticket_by_id( + ticket_id=ticket_id, + updates=updates + ) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_delete_ticket_by_id": + ticket_id = arguments.get("ticket_id") + if not ticket_id: + return [ + types.TextContent( + type="text", + text="Error: ticket_id parameter is required", + ) + ] + try: + result = await hubspot_delete_ticket_by_id(ticket_id) + return [ + types.TextContent( + type="text", + text="Deleted", + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_create_note": + note_body = arguments.get("note_body") + if not note_body: + return [ + types.TextContent( + type="text", + text="Error: note_body parameter is required", + ) + ] + try: + result = await hubspot_create_note( + note_body=note_body, + contact_ids=arguments.get("contact_ids"), + company_ids=arguments.get("company_ids"), + deal_ids=arguments.get("deal_ids"), + ticket_ids=arguments.get("ticket_ids"), + owner_id=arguments.get("owner_id"), + timestamp=arguments.get("timestamp") + ) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Tasks + elif name == "hubspot_get_tasks": + try: + limit = arguments.get("limit", 10) + result = await hubspot_get_tasks(limit) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_get_task_by_id": + task_id = arguments.get("task_id") + if not task_id: + return [ + types.TextContent( + type="text", + text="Error: task_id parameter is required", + ) + ] + try: + result = await hubspot_get_task_by_id(task_id) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_create_task": + try: + result = await hubspot_create_task(arguments["properties"]) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_update_task_by_id": + task_id = arguments.get("task_id") + updates = arguments.get("updates") + if not task_id or not updates: + return [ + types.TextContent( + type="text", + text="Error: task_id and updates parameters are required", + ) + ] + try: + result = await hubspot_update_task_by_id( + task_id=task_id, + updates=updates + ) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_delete_task_by_id": + task_id = arguments.get("task_id") + if not task_id: + return [ + types.TextContent( + type="text", + text="Error: task_id parameter is required", + ) + ] + try: + result = await hubspot_delete_task_by_id(task_id) + return [ + types.TextContent( + type="text", + text="Deleted", + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Associations + elif name == "hubspot_create_association": + from_object_type = arguments.get("from_object_type") + from_object_id = arguments.get("from_object_id") + to_object_type = arguments.get("to_object_type") + to_object_id = arguments.get("to_object_id") + association_type_id = arguments.get("association_type_id") + + if not all([from_object_type, from_object_id, to_object_type, to_object_id]): + return [ + types.TextContent( + type="text", + text="Error: from_object_type, from_object_id, to_object_type, and to_object_id are required", + ) + ] + try: + result = await hubspot_create_association( + from_object_type=from_object_type, + from_object_id=from_object_id, + to_object_type=to_object_type, + to_object_id=to_object_id, + association_type_id=association_type_id + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_delete_association": + from_object_type = arguments.get("from_object_type") + from_object_id = arguments.get("from_object_id") + to_object_type = arguments.get("to_object_type") + to_object_id = arguments.get("to_object_id") + association_type_id = arguments.get("association_type_id") + + if not all([from_object_type, from_object_id, to_object_type, to_object_id]): + return [ + types.TextContent( + type="text", + text="Error: from_object_type, from_object_id, to_object_type, and to_object_id are required", + ) + ] + try: + result = await hubspot_delete_association( + from_object_type=from_object_type, + from_object_id=from_object_id, + to_object_type=to_object_type, + to_object_id=to_object_id, + association_type_id=association_type_id + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_get_associations": + from_object_type = arguments.get("from_object_type") + from_object_id = arguments.get("from_object_id") + to_object_type = arguments.get("to_object_type") + + if not all([from_object_type, from_object_id, to_object_type]): + return [ + types.TextContent( + type="text", + text="Error: from_object_type, from_object_id, and to_object_type are required", + ) + ] + try: + result = await hubspot_get_associations( + from_object_type=from_object_type, + from_object_id=from_object_id, + to_object_type=to_object_type + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "hubspot_batch_create_associations": + from_object_type = arguments.get("from_object_type") + from_object_id = arguments.get("from_object_id") + to_object_type = arguments.get("to_object_type") + to_object_ids = arguments.get("to_object_ids") + association_type_id = arguments.get("association_type_id") + + if not all([from_object_type, from_object_id, to_object_type, to_object_ids]): + return [ + types.TextContent( + type="text", + text="Error: from_object_type, from_object_id, to_object_type, and to_object_ids are required", + ) + ] + try: + result = await hubspot_batch_create_associations( + from_object_type=from_object_type, + from_object_id=from_object_id, + to_object_type=to_object_type, + to_object_ids=to_object_ids, + association_type_id=association_type_id + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + else: + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/hubspot/tools/__init__.py b/mcp_servers/hubspot/tools/__init__.py new file mode 100644 index 00000000..284c6b1e --- /dev/null +++ b/mcp_servers/hubspot/tools/__init__.py @@ -0,0 +1,114 @@ +from .base import ( + auth_token_context, +) + +from .properties import ( + hubspot_list_properties, + hubspot_search_by_property, + hubspot_create_property, +) + +from .contacts import ( + hubspot_get_contacts, + hubspot_get_contact_by_id, + hubspot_delete_contact_by_id, + hubspot_create_contact, + hubspot_update_contact_by_id, +) + +from .companies import ( + hubspot_get_companies, + hubspot_get_company_by_id, + hubspot_create_companies, + hubspot_update_company_by_id, + hubspot_delete_company_by_id, +) + +from .deals import ( + hubspot_get_deals, + hubspot_get_deal_by_id, + hubspot_create_deal, + hubspot_update_deal_by_id, + hubspot_delete_deal_by_id, +) + +from .tickets import ( + hubspot_get_tickets, + hubspot_get_ticket_by_id, + hubspot_create_ticket, + hubspot_update_ticket_by_id, + hubspot_delete_ticket_by_id, +) + +from .notes import ( + hubspot_create_note, +) + +from .tasks import ( + hubspot_get_tasks, + hubspot_get_task_by_id, + hubspot_create_task, + hubspot_update_task_by_id, + hubspot_delete_task_by_id, +) + +from .associations import ( + hubspot_create_association, + hubspot_delete_association, + hubspot_get_associations, + hubspot_batch_create_associations, +) + +__all__ = [ + # Base + "auth_token_context", + + # Properties + "hubspot_list_properties", + "hubspot_search_by_property", + "hubspot_create_property", + + # Contacts + "hubspot_get_contacts", + "hubspot_get_contact_by_id", + "hubspot_delete_contact_by_id", + "hubspot_create_contact", + "hubspot_update_contact_by_id", + + # Companies + "hubspot_get_companies", + "hubspot_get_company_by_id", + "hubspot_create_companies", + "hubspot_update_company_by_id", + "hubspot_delete_company_by_id", + + # Deals + "hubspot_get_deals", + "hubspot_get_deal_by_id", + "hubspot_create_deal", + "hubspot_update_deal_by_id", + "hubspot_delete_deal_by_id", + + # Tickets + "hubspot_get_tickets", + "hubspot_get_ticket_by_id", + "hubspot_create_ticket", + "hubspot_update_ticket_by_id", + "hubspot_delete_ticket_by_id", + + # Notes + "hubspot_create_note", + + # Tasks + "hubspot_get_tasks", + "hubspot_get_task_by_id", + "hubspot_create_task", + "hubspot_update_task_by_id", + "hubspot_delete_task_by_id", + + # Associations + "hubspot_create_association", + "hubspot_delete_association", + "hubspot_get_associations", + "hubspot_batch_create_associations", +] diff --git a/mcp_servers/hubspot/tools/associations.py b/mcp_servers/hubspot/tools/associations.py new file mode 100644 index 00000000..fddc7353 --- /dev/null +++ b/mcp_servers/hubspot/tools/associations.py @@ -0,0 +1,251 @@ +import logging +from typing import List, Optional +from .base import get_hubspot_client + +logger = logging.getLogger(__name__) + +# HubSpot Association Type IDs (default types) +ASSOCIATION_TYPES = { + # Contact to Company + "contacts_to_companies": 1, + "companies_to_contacts": 2, + + # Contact to Deal + "contacts_to_deals": 3, + "deals_to_contacts": 4, + + # Contact to Ticket + "contacts_to_tickets": 15, + "tickets_to_contacts": 16, + + # Company to Deal + "companies_to_deals": 5, + "deals_to_companies": 6, + + # Company to Ticket + "companies_to_tickets": 25, + "tickets_to_companies": 26, + + # Deal to Ticket + "deals_to_tickets": 27, + "tickets_to_deals": 28, + + # Contact to Contact (related contacts) + "contacts_to_contacts": 449, + + # Company to Company (parent/child companies) + "companies_to_companies": 13, +} + + +async def hubspot_create_association( + from_object_type: str, + from_object_id: str, + to_object_type: str, + to_object_id: str, + association_type_id: Optional[int] = None +): + """ + Create an association between two HubSpot objects. + + Parameters: + - from_object_type: The object type to associate from (contacts, companies, deals, tickets) + - from_object_id: The ID of the source object + - to_object_type: The object type to associate to (contacts, companies, deals, tickets) + - to_object_id: The ID of the target object + - association_type_id: Optional custom association type ID. If not provided, uses default association type. + + Returns: + - Result message + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Creating association from {from_object_type}:{from_object_id} to {to_object_type}:{to_object_id}") + + # Use the V4 associations API with create_default method + # This creates the default (most generic) association type between two object types + result = client.crm.associations.v4.basic_api.create_default( + from_object_type=from_object_type, + from_object_id=from_object_id, + to_object_type=to_object_type, + to_object_id=to_object_id + ) + + logger.info(f"Association created successfully: {from_object_type}:{from_object_id} -> {to_object_type}:{to_object_id}") + return { + "status": "success", + "message": f"Associated {from_object_type} {from_object_id} with {to_object_type} {to_object_id}", + "from_object_type": from_object_type, + "from_object_id": from_object_id, + "to_object_type": to_object_type, + "to_object_id": to_object_id, + "result": str(result) if result else "Association created" + } + except Exception as e: + logger.error(f"Error creating association: {e}") + raise Exception(f"Failed to create association: {str(e)}") + + +async def hubspot_delete_association( + from_object_type: str, + from_object_id: str, + to_object_type: str, + to_object_id: str, + association_type_id: Optional[int] = None +): + """ + Remove an association between two HubSpot objects. + + Parameters: + - from_object_type: The object type to disassociate from (contacts, companies, deals, tickets) + - from_object_id: The ID of the source object + - to_object_type: The object type to disassociate from (contacts, companies, deals, tickets) + - to_object_id: The ID of the target object + - association_type_id: Optional custom association type ID (not used, kept for compatibility). + + Returns: + - Result message + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Removing association from {from_object_type}:{from_object_id} to {to_object_type}:{to_object_id}") + + # Use the V4 associations API archive method + # This deletes all associations between two records + client.crm.associations.v4.basic_api.archive( + object_type=from_object_type, + object_id=from_object_id, + to_object_type=to_object_type, + to_object_id=to_object_id + ) + + logger.info(f"Association removed successfully: {from_object_type}:{from_object_id} -> {to_object_type}:{to_object_id}") + return { + "status": "success", + "message": f"Removed association between {from_object_type} {from_object_id} and {to_object_type} {to_object_id}" + } + except Exception as e: + logger.error(f"Error removing association: {e}") + raise Exception(f"Failed to remove association: {str(e)}") + + +async def hubspot_get_associations( + from_object_type: str, + from_object_id: str, + to_object_type: str +): + """ + Get all associations of a specific type for an object. + + Parameters: + - from_object_type: The source object type (contacts, companies, deals, tickets) + - from_object_id: The ID of the source object + - to_object_type: The type of objects to get associations for (contacts, companies, deals, tickets) + + Returns: + - List of associated object IDs + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Fetching associations for {from_object_type}:{from_object_id} to {to_object_type}") + + # Use the V4 associations API to get associations + result = client.crm.associations.v4.basic_api.get_page( + object_type=from_object_type, + object_id=from_object_id, + to_object_type=to_object_type + ) + + associations = [] + if hasattr(result, 'results') and result.results: + for assoc in result.results: + associations.append({ + "to_object_id": assoc.to_object_id if hasattr(assoc, 'to_object_id') else assoc.id, + "association_types": [ + { + "category": at.category if hasattr(at, 'category') else None, + "type_id": at.type_id if hasattr(at, 'type_id') else None, + "label": at.label if hasattr(at, 'label') else None + } + for at in (assoc.association_types if hasattr(assoc, 'association_types') else []) + ] if hasattr(assoc, 'association_types') else [] + }) + + logger.info(f"Found {len(associations)} associations from {from_object_type}:{from_object_id} to {to_object_type}") + return { + "from_object_type": from_object_type, + "from_object_id": from_object_id, + "to_object_type": to_object_type, + "associations": associations, + "total": len(associations) + } + except Exception as e: + logger.error(f"Error fetching associations: {e}") + raise Exception(f"Failed to fetch associations: {str(e)}") + + +async def hubspot_batch_create_associations( + from_object_type: str, + from_object_id: str, + to_object_type: str, + to_object_ids: List[str], + association_type_id: Optional[int] = None +): + """ + Create multiple associations at once (batch operation). + + Parameters: + - from_object_type: The object type to associate from (contacts, companies, deals, tickets) + - from_object_id: The ID of the source object + - to_object_type: The object type to associate to (contacts, companies, deals, tickets) + - to_object_ids: List of target object IDs to associate with + - association_type_id: Optional custom association type ID (not used, kept for compatibility). + + Returns: + - Result message with count of associations created + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Creating batch associations from {from_object_type}:{from_object_id} to {len(to_object_ids)} {to_object_type}") + + # Create associations one by one using create_default + success_count = 0 + errors = [] + + for to_id in to_object_ids: + try: + client.crm.associations.v4.basic_api.create_default( + from_object_type=from_object_type, + from_object_id=from_object_id, + to_object_type=to_object_type, + to_object_id=to_id + ) + success_count += 1 + except Exception as e: + errors.append(f"Failed to associate with {to_id}: {str(e)}") + logger.warning(f"Failed to create association with {to_id}: {e}") + + logger.info(f"Batch association completed: {success_count}/{len(to_object_ids)} successful") + return { + "status": "completed", + "total_requested": len(to_object_ids), + "successful": success_count, + "failed": len(errors), + "errors": errors if errors else None + } + except Exception as e: + logger.error(f"Error in batch create associations: {e}") + raise Exception(f"Failed to create batch associations: {str(e)}") + diff --git a/mcp_servers/hubspot/tools/base.py b/mcp_servers/hubspot/tools/base.py new file mode 100644 index 00000000..a6a90346 --- /dev/null +++ b/mcp_servers/hubspot/tools/base.py @@ -0,0 +1,43 @@ +import logging +import os +from contextvars import ContextVar +from hubspot import HubSpot +from typing import Optional +from dotenv import load_dotenv + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + token = auth_token_context.get() + if not token: + # Fallback to environment variable if no token in context + token = os.getenv("HUBSPOT_ACCESS_TOKEN") + if not token: + raise RuntimeError("No authentication token available") + return token + except LookupError: + token = os.getenv("HUBSPOT_ACCESS_TOKEN") + if not token: + raise RuntimeError("Authentication token not found in request context or environment") + return token + +def get_hubspot_client() -> Optional[HubSpot]: + """Get HubSpot client with auth token from context.""" + try: + auth_token = get_auth_token() + client = HubSpot(access_token=auth_token) + return client + except RuntimeError as e: + logger.warning(f"Failed to get auth token: {e}") + return None + except Exception as e: + logger.error(f"Failed to initialize HubSpot client: {e}") + return None diff --git a/mcp_servers/hubspot/tools/companies.py b/mcp_servers/hubspot/tools/companies.py new file mode 100644 index 00000000..a8f59f4f --- /dev/null +++ b/mcp_servers/hubspot/tools/companies.py @@ -0,0 +1,127 @@ +import logging +import json +from hubspot.crm.companies import SimplePublicObjectInputForCreate, SimplePublicObjectInput +from .base import get_hubspot_client + +# Configure logging +logger = logging.getLogger(__name__) + +async def hubspot_create_companies(properties: str) -> str: + """ + Create a new company using JSON string of properties. + + Parameters: + - properties: JSON string of company fields + + Returns: + - Status message + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info("Creating company...") + properties = json.loads(properties) + data = SimplePublicObjectInputForCreate(properties=properties) + client.crm.companies.basic_api.create(simple_public_object_input_for_create=data) + logger.info("Company created successfully.") + return "Created" + except Exception as e: + logger.error(f"Error creating company: {e}") + return f"Error occurred: {e}" + +async def hubspot_get_companies(limit: int = 10): + """ + Fetch a list of companies from HubSpot. + + Parameters: + - limit: Number of companies to retrieve + + Returns: + - Paginated companies response + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Fetching up to {limit} companies...") + result = client.crm.companies.basic_api.get_page(limit=limit) + logger.info(f"Fetched {len(result.results)} companies successfully.") + return result + except Exception as e: + logger.error(f"Error fetching companies: {e}") + return None + +async def hubspot_get_company_by_id(company_id: str): + """ + Get a company by ID. + + Parameters: + - company_id: ID of the company + + Returns: + - Company object + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Fetching company with ID: {company_id}...") + result = client.crm.companies.basic_api.get_by_id(company_id) + logger.info(f"Fetched company ID: {company_id} successfully.") + return result + except Exception as e: + logger.error(f"Error fetching company by ID: {e}") + return None + +async def hubspot_update_company_by_id(company_id: str, updates: str) -> str: + """ + Update a company by ID. + + Parameters: + - company_id: ID of the company to update + - updates: JSON string of property updates + + Returns: + - Status message + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Updating company ID: {company_id}...") + updates = json.loads(updates) + update = SimplePublicObjectInput(properties=updates) + client.crm.companies.basic_api.update(company_id, update) + logger.info(f"Company ID: {company_id} updated successfully.") + return "Done" + except Exception as e: + logger.error(f"Update failed: {e}") + return f"Error occurred: {e}" + +async def hubspot_delete_company_by_id(company_id: str) -> str: + """ + Delete a company by ID. + + Parameters: + - company_id: ID of the company + + Returns: + - Status message + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Deleting company ID: {company_id}...") + client.crm.companies.basic_api.archive(company_id) + logger.info(f"Company ID: {company_id} deleted successfully.") + return "Deleted" + except Exception as e: + logger.error(f"Error deleting company: {e}") + return f"Error occurred: {e}" \ No newline at end of file diff --git a/mcp_servers/hubspot/tools/contacts.py b/mcp_servers/hubspot/tools/contacts.py new file mode 100644 index 00000000..6de80c46 --- /dev/null +++ b/mcp_servers/hubspot/tools/contacts.py @@ -0,0 +1,127 @@ +import logging +import json +from hubspot.crm.contacts import SimplePublicObjectInputForCreate, SimplePublicObjectInput +from .base import get_hubspot_client + +# Configure logging +logger = logging.getLogger(__name__) + +async def hubspot_get_contacts(limit: int = 10): + """ + Fetch a list of contacts from HubSpot. + + Parameters: + - limit: Number of contacts to retrieve + + Returns: + - Paginated contacts response + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Fetching up to {limit} contacts from HubSpot") + result = client.crm.contacts.basic_api.get_page(limit=limit) + logger.info("Successfully fetched contacts") + return result + except Exception as e: + logger.error(f"Error fetching contacts: {e}") + raise e + +async def hubspot_get_contact_by_id(contact_id: str): + """ + Get a specific contact by ID. + + Parameters: + - contact_id: ID of the contact to retrieve + + Returns: + - Contact object + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Fetching contact with ID: {contact_id}") + result = client.crm.contacts.basic_api.get_by_id(contact_id) + logger.info("Successfully fetched contact") + return result + except Exception as e: + logger.error(f"Error fetching contact by ID: {e}") + raise e + +async def hubspot_delete_contact_by_id(contact_id: str) -> str: + """ + Delete a contact by ID. + + Parameters: + - contact_id: ID of the contact to delete + + Returns: + - Status message + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Deleting contact with ID: {contact_id}") + client.crm.contacts.basic_api.archive(contact_id) + logger.info("Successfully deleted contact") + return "Deleted" + except Exception as e: + logger.error(f"Error deleting contact: {e}") + raise e + +async def hubspot_create_contact(properties: str) -> str: + """ + Create a new contact using JSON string of properties. + + Parameters: + - properties: JSON string containing contact fields + + Returns: + - Status message + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + properties = json.loads(properties) + logger.info(f"Creating contact with properties: {properties}") + data = SimplePublicObjectInputForCreate(properties=properties) + client.crm.contacts.basic_api.create(simple_public_object_input_for_create=data) + logger.info("Successfully created contact") + return "Created" + except Exception as e: + logger.error(f"Error creating contact: {e}") + raise e + +async def hubspot_update_contact_by_id(contact_id: str, updates: str) -> str: + """ + Update a contact by ID. + + Parameters: + - contact_id: ID of the contact to update + - updates: JSON string of properties to update + + Returns: + - Status message + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + updates = json.loads(updates) + logger.info(f"Updating contact ID: {contact_id} with updates: {updates}") + data = SimplePublicObjectInput(properties=updates) + client.crm.contacts.basic_api.update(contact_id, data) + logger.info("Successfully updated contact") + return "Done" + except Exception as e: + logger.error(f"Update failed: {e}") + return f"Error occurred: {e}" \ No newline at end of file diff --git a/mcp_servers/hubspot/tools/deals.py b/mcp_servers/hubspot/tools/deals.py new file mode 100644 index 00000000..7be1b900 --- /dev/null +++ b/mcp_servers/hubspot/tools/deals.py @@ -0,0 +1,163 @@ +import logging +import json +from hubspot.crm.deals import SimplePublicObjectInputForCreate, SimplePublicObjectInput +from .base import get_hubspot_client + +# Configure logging +logger = logging.getLogger(__name__) + +def _build_dealstage_label_map(client) -> dict: + """ + Build a mapping from deal stage ID to its human-readable label across all deal pipelines. + + Returns: + - dict mapping stage_id -> label (e.g., {"appointmentscheduled": "Appointment Scheduled", "1890285259": "POC"}) + """ + stage_id_to_label: dict = {} + try: + pipelines = client.crm.pipelines.pipelines_api.get_all("deals") + for pipeline in getattr(pipelines, "results", []) or []: + try: + stages = client.crm.pipelines.pipeline_stages_api.get_all("deals", pipeline.id) + for stage in getattr(stages, "results", []) or []: + if getattr(stage, "id", None) and getattr(stage, "label", None): + stage_id_to_label[stage.id] = stage.label + except Exception as inner_exc: + logger.debug(f"Failed to fetch stages for pipeline {getattr(pipeline, 'id', 'unknown')}: {inner_exc}") + except Exception as exc: + logger.debug(f"Failed to fetch pipelines for deals: {exc}") + return stage_id_to_label + +async def hubspot_get_deals(limit: int = 10): + """ + Fetch a list of deals from HubSpot. + + Parameters: + - limit: Number of deals to return + + Returns: + - List of deal records + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Fetching up to {limit} deals...") + result = client.crm.deals.basic_api.get_page(limit=limit) + # Enrich with human-readable dealstage label + stage_label_map = _build_dealstage_label_map(client) + for obj in getattr(result, "results", []) or []: + props = getattr(obj, "properties", {}) or {} + stage_id = props.get("dealstage") + if stage_id and stage_id in stage_label_map: + props["dealstage_label"] = stage_label_map[stage_id] + obj.properties = props + logger.info(f"Fetched {len(result.results)} deals successfully.") + return result + except Exception as e: + logger.error(f"Error fetching deals: {e}") + return None + +async def hubspot_get_deal_by_id(deal_id: str): + """ + Fetch a deal by its ID. + + Parameters: + - deal_id: HubSpot deal ID + + Returns: + - Deal object + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Fetching deal ID: {deal_id}...") + result = client.crm.deals.basic_api.get_by_id(deal_id) + # Enrich with human-readable dealstage label + stage_label_map = _build_dealstage_label_map(client) + props = getattr(result, "properties", {}) or {} + stage_id = props.get("dealstage") + if stage_id and stage_id in stage_label_map: + props["dealstage_label"] = stage_label_map[stage_id] + result.properties = props + logger.info(f"Fetched deal ID: {deal_id} successfully.") + return result + except Exception as e: + logger.error(f"Error fetching deal by ID: {e}") + return None + +async def hubspot_create_deal(properties: str): + """ + Create a new deal. + + Parameters: + - properties: JSON string of deal properties + + Returns: + - Newly created deal + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info("Creating a new deal...") + props = json.loads(properties) + data = SimplePublicObjectInputForCreate(properties=props) + result = client.crm.deals.basic_api.create(simple_public_object_input_for_create=data) + logger.info("Deal created successfully.") + return result + except Exception as e: + logger.error(f"Error creating deal: {e}") + return f"Error occurred: {e}" + +async def hubspot_update_deal_by_id(deal_id: str, updates: str): + """ + Update a deal by ID. + + Parameters: + - deal_id: HubSpot deal ID + - updates: JSON string of updated fields + + Returns: + - "Done" on success, error message otherwise + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Updating deal ID: {deal_id}...") + data = SimplePublicObjectInput(properties=json.loads(updates)) + client.crm.deals.basic_api.update(deal_id, data) + logger.info(f"Deal ID: {deal_id} updated successfully.") + return "Done" + except Exception as e: + logger.error(f"Update failed for deal ID {deal_id}: {e}") + return f"Error occurred: {e}" + +async def hubspot_delete_deal_by_id(deal_id: str): + """ + Delete a deal by ID. + + Parameters: + - deal_id: HubSpot deal ID + + Returns: + - None + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Deleting deal ID: {deal_id}...") + client.crm.deals.basic_api.archive(deal_id) + logger.info(f"Deal ID: {deal_id} deleted successfully.") + return "Deleted" + except Exception as e: + logger.error(f"Error deleting deal: {e}") + return f"Error occurred: {e}" \ No newline at end of file diff --git a/mcp_servers/hubspot/tools/notes.py b/mcp_servers/hubspot/tools/notes.py new file mode 100644 index 00000000..bb53ef20 --- /dev/null +++ b/mcp_servers/hubspot/tools/notes.py @@ -0,0 +1,109 @@ +import logging +import json +from typing import List, Optional, Dict, Any +from datetime import datetime +from hubspot.crm.objects import SimplePublicObjectInputForCreate +from .base import get_hubspot_client + +# Configure logging +logger = logging.getLogger(__name__) + +async def hubspot_create_note( + note_body: str, + contact_ids: Optional[List[str]] = None, + company_ids: Optional[List[str]] = None, + deal_ids: Optional[List[str]] = None, + ticket_ids: Optional[List[str]] = None, + owner_id: Optional[str] = None, + timestamp: Optional[str] = None +) -> str: + """ + Create a new note in HubSpot. + + Parameters: + - note_body: The content of the note + - contact_ids: List of contact IDs to associate with the note + - company_ids: List of company IDs to associate with the note + - deal_ids: List of deal IDs to associate with the note + - ticket_ids: List of ticket IDs to associate with the note + - owner_id: HubSpot user ID of the note owner + - timestamp: ISO 8601 timestamp or milliseconds since epoch for when the note was created (defaults to current time if not provided) + + Returns: + - Status message with note ID + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + # Prepare the note properties + properties = { + "hs_note_body": note_body + } + + # Add optional properties if provided + if owner_id: + properties["hubspot_owner_id"] = owner_id + + # hs_timestamp is required - use provided timestamp or current time + if timestamp: + properties["hs_timestamp"] = timestamp + else: + # Use current timestamp in milliseconds (HubSpot format) + current_timestamp = int(datetime.now().timestamp() * 1000) + properties["hs_timestamp"] = str(current_timestamp) + + # Prepare associations + associations = [] + + # Add contact associations + if contact_ids: + for contact_id in contact_ids: + associations.append({ + "to": {"id": contact_id}, + "types": [{"associationCategory": "HUBSPOT_DEFINED", "associationTypeId": 202}] + }) + + # Add company associations + if company_ids: + for company_id in company_ids: + associations.append({ + "to": {"id": company_id}, + "types": [{"associationCategory": "HUBSPOT_DEFINED", "associationTypeId": 190}] + }) + + # Add deal associations + if deal_ids: + for deal_id in deal_ids: + associations.append({ + "to": {"id": deal_id}, + "types": [{"associationCategory": "HUBSPOT_DEFINED", "associationTypeId": 214}] + }) + + # Add ticket associations + if ticket_ids: + for ticket_id in ticket_ids: + associations.append({ + "to": {"id": ticket_id}, + "types": [{"associationCategory": "HUBSPOT_DEFINED", "associationTypeId": 216}] + }) + + logger.info(f"Creating note with properties: {properties}") + + # Create the note using the objects API, Convert to HubSpot SDK format + note_input = SimplePublicObjectInputForCreate( + properties=properties, + associations=associations if associations else None + ) + + result = client.crm.objects.notes.basic_api.create( + simple_public_object_input_for_create=note_input + ) + + logger.info(f"Successfully created note with ID: {result.id}") + return f"Note created successfully with ID: {result.id}" + + except Exception as e: + logger.error(f"Error creating note: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/hubspot/tools/properties.py b/mcp_servers/hubspot/tools/properties.py new file mode 100644 index 00000000..608356e8 --- /dev/null +++ b/mcp_servers/hubspot/tools/properties.py @@ -0,0 +1,263 @@ +import logging +import json +import ast +from hubspot.crm.objects import Filter, FilterGroup, PublicObjectSearchRequest +from hubspot.crm.properties import PropertyCreate +from .base import get_hubspot_client +from .deals import _build_dealstage_label_map + +# Configure logging +logger = logging.getLogger(__name__) + +async def hubspot_list_properties(object_type: str) -> list[dict]: + """ + List all properties for a given object type. + + Parameters: + - object_type: One of "contacts", "companies", "deals", or "tickets" + + Returns: + - List of property metadata + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + logger.info(f"Executing hubspot_list_properties for object_type: {object_type}") + try: + props = client.crm.properties.core_api.get_all(object_type) + logger.info(f"Successfully Executed hubspot_list_properties for object_type: {object_type}") + return [ + { + "name": p.name, + "label": p.label, + "type": p.type, + "field_type": p.field_type + } + for p in props.results + ] + except Exception as e: + logger.exception(f"Error executing hubspot_list_properties: {e}") + raise e + +async def hubspot_search_by_property( + object_type: str, + property_name: str, + operator: str, + value: str, + properties: list[str], + limit: int = 10 +) -> list[dict]: + """ + Search HubSpot objects by property. + + Parameters: + - object_type: One of "contacts", "companies", "deals", or "tickets" + - property_name: Field to search + - operator: Filter operator (see note below) + - value: Value to search for + - properties: List of fields to return + - limit: Max number of results + + Returns: + - List of result dictionaries + + Note: + Supported operators (with expected value format and behavior): + + - EQ (Equal): Matches records where the property exactly equals the given value. + Example: "lifecyclestage" EQ "customer" + + - NEQ (Not Equal): Matches records where the property does not equal the given value. + Example: "country" NEQ "India" + + - GT (Greater Than): Matches records where the property is greater than the given value. + Example: "numberofemployees" GT "100" + + - GTE (Greater Than or Equal): Matches records where the property is greater than or equal to the given value. + Example: "revenue" GTE "50000" + + - LT (Less Than): Matches records where the property is less than the given value. + Example: "score" LT "75" + + - LTE (Less Than or Equal): Matches records where the property is less than or equal to the given value. + Example: "createdate" LTE "2023-01-01T00:00:00Z" + + - BETWEEN: Matches records where the property is within a specified range. + Value must be a list of two values [start, end]. + Example: "createdate" BETWEEN ["2023-01-01T00:00:00Z", "2023-12-31T23:59:59Z"] + + - IN: Matches records where the property is one of the values in the list. + Value must be a list. + Example: "industry" IN ["Technology", "Healthcare"] + + - NOT_IN: Matches records where the property is none of the values in the list. + Value must be a list. + Example: "state" NOT_IN ["CA", "NY"] + + - CONTAINS_TOKEN: Matches records where the property contains the given word/token (case-insensitive). + Example: "notes" CONTAINS_TOKEN "demo" + + - NOT_CONTAINS_TOKEN: Matches records where the property does NOT contain the given word/token. + Example: "comments" NOT_CONTAINS_TOKEN "urgent" + + - STARTS_WITH: Matches records where the property value starts with the given substring. + Example: "firstname" STARTS_WITH "Jo" + + - ENDS_WITH: Matches records where the property value ends with the given substring. + Example: "email" ENDS_WITH "@gmail.com" + + - ON_OR_AFTER: For datetime fields, matches records where the date is the same or after the given value. + Example: "createdate" ON_OR_AFTER "2024-01-01T00:00:00Z" + + - ON_OR_BEFORE: For datetime fields, matches records where the date is the same or before the given value. + Example: "closedate" ON_OR_BEFORE "2024-12-31T23:59:59Z" + + Value type rules: + - If the operator expects a list (e.g., IN, BETWEEN), pass value as a JSON-encoded string list: '["a", "b"]' + - All other operators expect a single string (even for numbers or dates) + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + logger.info(f"Executing hubspot_search_by_property on {object_type}: {property_name} {operator} {value}") + + try: + # Build Filter with correct fields depending on operator + filter_kwargs = {"property_name": property_name, "operator": operator} + + # Operators that require no value + if operator in {"HAS_PROPERTY", "NOT_HAS_PROPERTY"}: + pass + + # Operators that require a list of values + elif operator in {"IN", "NOT_IN"}: + values_list: list[str] = [] + try: + parsed = json.loads(value) + if isinstance(parsed, list): + values_list = [str(v) for v in parsed] + except Exception: + try: + parsed = ast.literal_eval(value) + if isinstance(parsed, list): + values_list = [str(v) for v in parsed] + except Exception: + # Fallback: split by comma + values_list = [v.strip().strip('"\'') for v in value.split(',') if v.strip()] + + if not values_list: + raise ValueError("Operator IN/NOT_IN requires a non-empty list of values") + + filter_kwargs["values"] = values_list + + # Between expects two endpoints: low and high + elif operator == "BETWEEN": + low = None + high = None + try: + parsed = json.loads(value) + if isinstance(parsed, list) and len(parsed) >= 2: + low, high = str(parsed[0]), str(parsed[1]) + except Exception: + try: + parsed = ast.literal_eval(value) + if isinstance(parsed, list) and len(parsed) >= 2: + low, high = str(parsed[0]), str(parsed[1]) + except Exception: + pass + + if low is None or high is None: + raise ValueError("Operator BETWEEN requires a list with two values [low, high]") + + filter_kwargs["value"] = low + filter_kwargs["high_value"] = high + + # All other operators use single value + else: + filter_kwargs["value"] = value + + search_request = PublicObjectSearchRequest( + filter_groups=[ + FilterGroup(filters=[ + Filter(**filter_kwargs) + ]) + ], + properties=list(properties), + limit=limit + ) + + if object_type == "contacts": + results = client.crm.contacts.search_api.do_search(public_object_search_request=search_request) + elif object_type == "companies": + results = client.crm.companies.search_api.do_search(public_object_search_request=search_request) + elif object_type == "deals": + results = client.crm.deals.search_api.do_search(public_object_search_request=search_request) + elif object_type == "tickets": + results = client.crm.tickets.search_api.do_search(public_object_search_request=search_request) + else: + raise ValueError(f"Unsupported object type: {object_type}") + + logger.info(f"hubspot_search_by_property: Found {len(results.results)} result(s)") + # Enrich deals with human-readable dealstage label + if object_type == "deals": + stage_label_map = _build_dealstage_label_map(client) + enriched: list[dict] = [] + for obj in results.results: + props = (getattr(obj, "properties", {}) or {}).copy() + stage_id = props.get("dealstage") + if stage_id and stage_id in stage_label_map: + props["dealstage_label"] = stage_label_map[stage_id] + enriched.append(props) + return enriched + # For other objects, return properties as-is + return [obj.properties for obj in results.results] + + except Exception as e: + logger.exception(f"Error executing hubspot_search_by_property: {e}") + return (f"Error executing hubspot_search_by_property: {e}") + +async def hubspot_create_property(name: str, label: str, description: str, object_type: str) -> str: + """ + Create a new custom property for HubSpot objects. + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Creating property with name: {name}, label: {label}, object_type: {object_type}") + + group_map = { + "contacts": "contactinformation", + "companies": "companyinformation", + "deals": "dealinformation", + "tickets": "ticketinformation" + } + + if object_type not in group_map: + raise ValueError(f"Invalid object_type '{object_type}'") + + group_name = group_map[object_type] + + property = PropertyCreate( + name=name, + label=label, + group_name=group_name, + type="string", # backend data type + field_type="text", # frontend input type + description=description + ) + + client.crm.properties.core_api.create( + object_type=object_type, + property_create=property + ) + + logger.info("Successfully created property") + return "Property Created" + except Exception as e: + logger.error(f"Error creating property: {e}") + raise e + diff --git a/mcp_servers/hubspot/tools/tasks.py b/mcp_servers/hubspot/tools/tasks.py new file mode 100644 index 00000000..60ceeb81 --- /dev/null +++ b/mcp_servers/hubspot/tools/tasks.py @@ -0,0 +1,165 @@ +import logging +import json +from typing import Optional + +from hubspot.crm.objects import ( + SimplePublicObjectInputForCreate, + SimplePublicObjectInput, +) + +from .base import get_hubspot_client + + +# Configure logging +logger = logging.getLogger(__name__) + + +async def hubspot_get_tasks(limit: int = 10): + """ + Fetch a list of tasks from HubSpot. + + Parameters: + - limit: Number of tasks to return + + Returns: + - List of task records + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Fetching up to {limit} tasks...") + common_properties = [ + "hs_task_subject", + "hs_task_body", + "hs_task_status", + "hs_task_priority", + "hs_timestamp", + "hubspot_owner_id", + ] + result = client.crm.objects.tasks.basic_api.get_page( + limit=limit, + properties=common_properties, + ) + logger.info(f"Fetched {len(result.results)} tasks successfully.") + return result + except Exception as e: + logger.error(f"Error fetching tasks: {e}") + return None + + +async def hubspot_get_task_by_id(task_id: str): + """ + Fetch a task by its ID. + + Parameters: + - task_id: HubSpot task ID + + Returns: + - Task object + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Fetching task ID: {task_id}...") + common_properties = [ + "hs_task_subject", + "hs_task_body", + "hs_task_status", + "hs_task_priority", + "hs_timestamp", + "hubspot_owner_id", + ] + result = client.crm.objects.tasks.basic_api.get_by_id( + task_id, + properties=common_properties, + ) + print(f"---Result: {result}") + logger.info(f"Fetched task ID: {task_id} successfully.") + return result + except Exception as e: + logger.error(f"Error fetching task by ID: {e}") + return None + + +async def hubspot_create_task(properties: str): + """ + Create a new task. + + Parameters: + - properties: JSON string of task properties (see HubSpot docs) + + Returns: + - Newly created task + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info("Creating new task...") + props = json.loads(properties) + data = SimplePublicObjectInputForCreate(properties=props) + result = client.crm.objects.tasks.basic_api.create( + simple_public_object_input_for_create=data + ) + logger.info("Task created successfully.") + return result + except Exception as e: + logger.error(f"Error creating task: {e}") + return f"Error occurred: {e}" + + +async def hubspot_update_task_by_id(task_id: str, updates: str): + """ + Update a task by ID. + + Parameters: + - task_id: HubSpot task ID + - updates: JSON string of updated fields + + Returns: + - "Done" on success, error message otherwise + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Updating task ID: {task_id}...") + data = SimplePublicObjectInput(properties=json.loads(updates)) + client.crm.objects.tasks.basic_api.update(task_id, data) + logger.info(f"Task ID: {task_id} updated successfully.") + return "Done" + except Exception as e: + logger.error(f"Update failed for task ID {task_id}: {e}") + return f"Error occurred: {e}" + + +async def hubspot_delete_task_by_id(task_id: str): + """ + Delete a task by ID. + + Parameters: + - task_id: HubSpot task ID + + Returns: + - "Deleted" on success, error message otherwise + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Deleting task ID: {task_id}...") + client.crm.objects.tasks.basic_api.archive(task_id) + logger.info(f"Task ID: {task_id} deleted successfully.") + return "Deleted" + except Exception as e: + logger.error(f"Error deleting task ID {task_id}: {e}") + return f"Error occurred: {e}" + + diff --git a/mcp_servers/hubspot/tools/tickets.py b/mcp_servers/hubspot/tools/tickets.py new file mode 100644 index 00000000..c781b51f --- /dev/null +++ b/mcp_servers/hubspot/tools/tickets.py @@ -0,0 +1,126 @@ +import logging +import json +from hubspot.crm.tickets import SimplePublicObjectInputForCreate, SimplePublicObjectInput +from .base import get_hubspot_client + +# Configure logging +logger = logging.getLogger(__name__) + +async def hubspot_get_tickets(limit: int = 10): + """ + Fetch a list of tickets from HubSpot. + + Parameters: + - limit: Number of tickets to return + + Returns: + - List of ticket records + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Fetching up to {limit} tickets...") + result = client.crm.tickets.basic_api.get_page(limit=limit) + logger.info(f"Fetched {len(result.results)} tickets successfully.") + return result + except Exception as e: + logger.error(f"Error fetching tickets: {e}") + return None + +async def hubspot_get_ticket_by_id(ticket_id: str): + """ + Fetch a ticket by its ID. + + Parameters: + - ticket_id: HubSpot ticket ID + + Returns: + - Ticket object + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Fetching ticket ID: {ticket_id}...") + result = client.crm.tickets.basic_api.get_by_id(ticket_id) + logger.info(f"Fetched ticket ID: {ticket_id} successfully.") + return result + except Exception as e: + logger.error(f"Error fetching ticket by ID: {e}") + return None + +async def hubspot_create_ticket(properties: str): + """ + Create a new ticket. + + Parameters: + - properties: JSON string of ticket properties + + Returns: + - Newly created ticket + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info("Creating new ticket...") + props = json.loads(properties) + data = SimplePublicObjectInputForCreate(properties=props) + result = client.crm.tickets.basic_api.create(simple_public_object_input_for_create=data) + logger.info("Ticket created successfully.") + return result + except Exception as e: + logger.error(f"Error creating ticket: {e}") + return f"Error occurred: {e}" + +async def hubspot_update_ticket_by_id(ticket_id: str, updates: str): + """ + Update a ticket by ID. + + Parameters: + - ticket_id: HubSpot ticket ID + - updates: JSON string of updated fields + + Returns: + - "Done" on success, error message otherwise + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Updating ticket ID: {ticket_id}...") + data = SimplePublicObjectInput(properties=json.loads(updates)) + client.crm.tickets.basic_api.update(ticket_id, data) + logger.info(f"Ticket ID: {ticket_id} updated successfully.") + return "Done" + except Exception as e: + logger.error(f"Update failed for ticket ID {ticket_id}: {e}") + return f"Error occurred: {e}" + +async def hubspot_delete_ticket_by_id(ticket_id: str): + """ + Delete a ticket by ID. + + Parameters: + - ticket_id: HubSpot ticket ID + + Returns: + - None + """ + client = get_hubspot_client() + if not client: + raise ValueError("HubSpot client not available. Please check authentication.") + + try: + logger.info(f"Deleting ticket ID: {ticket_id}...") + client.crm.tickets.basic_api.archive(ticket_id) + logger.info(f"Ticket ID: {ticket_id} deleted successfully.") + return "Deleted" + except Exception as e: + logger.error(f"Error deleting ticket ID {ticket_id}: {e}") + return f"Error occurred: {e}" \ No newline at end of file diff --git a/mcp_servers/intercom/.env.example b/mcp_servers/intercom/.env.example new file mode 100644 index 00000000..82867112 --- /dev/null +++ b/mcp_servers/intercom/.env.example @@ -0,0 +1,3 @@ +# Intercom API credentials +INTERCOM_ACCESS_TOKEN=your-actual-intercom-access-token-here +PORT=5000 diff --git a/mcp_servers/intercom/Dockerfile b/mcp_servers/intercom/Dockerfile new file mode 100644 index 00000000..5122e49a --- /dev/null +++ b/mcp_servers/intercom/Dockerfile @@ -0,0 +1,37 @@ +# Use a Node.js image as the base for building the application +FROM node:22-alpine AS builder + +# Set the working directory inside the container +WORKDIR /app + +# Copy package.json and package-lock.json to install dependencies +COPY mcp_servers/intercom/package.json mcp_servers/intercom/package-lock.json ./ + +# Install dependencies (skipping scripts to avoid prepare script execution) +RUN npm install --ignore-scripts + +# Copy the rest of the application source code +COPY mcp_servers/intercom . + +# Build the application using TypeScript +RUN npm run build + +# Use a smaller Node.js image for the final image +FROM node:22-slim AS release + +# Set the working directory inside the container +WORKDIR /app + +# Copy the built application from the builder stage +COPY --from=builder /app/dist /app/dist +COPY --from=builder /app/package.json /app/package.json +COPY --from=builder /app/package-lock.json /app/package-lock.json + +# Install only production dependencies +RUN npm ci --omit=dev --ignore-scripts + +# Expose the port your app listens on (assuming 5000 as per your server) +EXPOSE 5000 + +# Specify the command to run the application +ENTRYPOINT ["node", "dist/index.js"] \ No newline at end of file diff --git a/mcp_servers/intercom/README.md b/mcp_servers/intercom/README.md new file mode 100644 index 00000000..c4ea4bca --- /dev/null +++ b/mcp_servers/intercom/README.md @@ -0,0 +1,432 @@ +# Intercom MCP Server + +This directory contains a Model Context Protocol (MCP) server for integrating [Intercom](https://www.intercom.com/) capabilities into applications like Claude, Cursor, and other LLM clients. It allows leveraging Intercom's powerful customer messaging and support platform features through a standardized protocol. + +## Features + +This server exposes **69 comprehensive tools** covering the full Intercom API functionality: + +### Contact Management (12 tools) + +- `intercom_list_contacts`: Get all contacts with pagination and filtering support +- `intercom_get_contact`: Retrieve detailed information about specific contacts +- `intercom_create_contact`: Create new contacts with flexible requirements (email, external_id, or role) +- `intercom_update_contact`: Update existing contact properties and custom attributes +- `intercom_delete_contact`: Permanently remove contacts from workspace +- `intercom_search_contacts`: Advanced contact search with complex query filters and operators +- `intercom_merge_contact`: Merge lead contacts into user contacts +- `intercom_list_contact_notes`: Get all notes attached to a specific contact +- `intercom_create_contact_note`: Add internal notes to contact records +- `intercom_list_contact_tags`: List all tags associated with a contact +- `intercom_add_contact_tag`: Tag contacts for organization and segmentation +- `intercom_remove_contact_tag`: Remove tags from contacts + +### Conversation Management (12 tools) + +- `intercom_list_conversations`: Get all conversations with pagination and display options +- `intercom_get_conversation`: Retrieve complete conversation details with all message parts +- `intercom_create_conversation`: Create conversations initiated by contacts +- `intercom_update_conversation`: Update conversation properties (read status, title, custom attributes) +- `intercom_delete_conversation`: Remove conversations from workspace +- `intercom_search_conversations`: Advanced conversation search with filtering and operators +- `intercom_reply_conversation`: Reply to conversations (admin/user replies, notes, quick replies) +- `intercom_manage_conversation`: Manage conversation state (close, snooze, open, assign) +- `intercom_attach_contact_to_conversation`: Add participants to group conversations +- `intercom_detach_contact_from_conversation`: Remove participants from conversations +- `intercom_redact_conversation`: Redact conversation parts or source messages +- `intercom_convert_conversation_to_ticket`: Convert conversations to support tickets + +### Company Management (13 tools) + +- `intercom_list_companies`: Get all companies with pagination and ordering +- `intercom_get_company`: Retrieve detailed company information +- `intercom_create_company`: Create new companies with business data and custom attributes +- `intercom_update_company`: Update company properties, metrics, and custom data +- `intercom_delete_company`: Remove companies from workspace +- `intercom_find_company`: Find companies using external company IDs +- `intercom_list_company_users`: Get all users belonging to a specific company +- `intercom_attach_contact_to_company`: Associate contacts with companies +- `intercom_detach_contact_from_company`: Remove contact-company associations +- `intercom_list_company_segments`: Get segments that a company belongs to +- `intercom_list_company_tags`: List tags attached to companies +- `intercom_tag_company`: Tag companies (supports bulk operations) +- `intercom_untag_company`: Remove tags from companies (supports bulk operations) + +### Help Center & Knowledge Base (11 tools) + +- `intercom_list_articles`: Get all Help Center articles with pagination +- `intercom_get_article`: Retrieve specific article content and metadata +- `intercom_create_article`: Create new Help Center articles with multilingual support +- `intercom_update_article`: Update article content, state, and translations +- `intercom_delete_article`: Remove articles from Help Center +- `intercom_search_articles`: Search articles by phrase, state, author, and parent +- `intercom_list_collections`: Get all Help Center collections +- `intercom_get_collection`: Retrieve collection details and structure +- `intercom_create_collection`: Create new collections with hierarchy support +- `intercom_update_collection`: Update collection properties and translations +- `intercom_delete_collection`: Remove collections from Help Center + +### Messaging & Communication (7 tools) + +- `intercom_create_message`: Send admin-initiated messages (in-app or email) +- `intercom_list_messages`: Get all messages sent from workspace +- `intercom_get_message`: Retrieve specific message details +- `intercom_create_note`: Add internal notes to contact records +- `intercom_list_notes`: Get notes for specific contacts with pagination +- `intercom_get_note`: Retrieve specific note details +- `intercom_send_user_message`: Create user/contact-initiated conversations + +### Tags & Segmentation (7 tools) + +- `intercom_list_tags`: Get all workspace tags +- `intercom_get_tag`: Retrieve specific tag information +- `intercom_create_or_update_tag`: Create new tags or update existing ones +- `intercom_tag_companies`: Tag companies (supports bulk operations) +- `intercom_untag_companies`: Remove tags from companies (supports bulk operations) +- `intercom_tag_users`: Tag multiple users/contacts at once +- `intercom_delete_tag`: Remove tags from workspace + +### Team & Admin Management (7 tools) + +- `intercom_list_teams`: Get all teams in workspace +- `intercom_get_team`: Retrieve team details with admin members +- `intercom_list_admins`: Get all admin users/teammates +- `intercom_get_admin`: Retrieve specific admin details +- `intercom_get_current_admin`: Get currently authenticated admin information +- `intercom_set_admin_away`: Set admin away status with conversation reassignment +- `intercom_list_admin_activity_logs`: Get audit trail of admin activities + +## Prerequisites + +- **Node.js:** Version 18.0.0 or higher +- **npm:** Node Package Manager (usually comes with Node.js) +- **TypeScript:** For development and building +- **Intercom Access Token:** Obtainable from your [Intercom Developer Hub](https://developers.intercom.com/) + +## Environment Setup + +Before running the server, you need to configure your Intercom API credentials. + +1. Create an environment file: +cp .env.example .env + + +**Note:** If `.env.example` doesn't exist, create `.env` directly: + +touch .env + +2. Edit `.env` and add your Intercom access token: + +Intercom API credentials +INTERCOM_ACCESS_TOKEN=your-actual-access-token-here +PORT=5000 + + +- `INTERCOM_ACCESS_TOKEN` (Required): Your access token for the Intercom API. You can obtain this from your Intercom Developer Hub by creating an app and generating an access token. +- `PORT` (Optional): The port number for the server to listen on. Defaults to 5000. + +## Getting Your Intercom Access Token + +### For Development (Access Token): + +1. Visit the [Intercom Developer Hub](https://developers.intercom.com/) +2. Sign in with your Intercom account +3. Create a new app or select an existing one +4. Go to the "Authentication" tab +5. Copy the Access Token (typically starts with `dG9r:`) +6. Use this token as your `INTERCOM_ACCESS_TOKEN` + +### Required Scopes: + +Ensure your Intercom app has the necessary scopes for the APIs you want to use: +- `read:contacts` - For contact management tools +- `write:contacts` - For creating/updating contacts +- `read:conversations` - For conversation management tools +- `write:conversations` - For creating/updating conversations +- `read:companies` - For company management tools +- `write:companies` - For creating/updating companies +- `read:articles` - For Help Center tools +- `write:articles` - For creating/updating articles +- `read:admins` - For team management tools + +## Running Locally + +### Using Node.js / npm + +1. **Install Dependencies:** +npm install + + + +2. **Build the Server Code:** + +npm run build + +3. **Start the Server:** + +npm start + + +The server will start using the environment variables defined in `.env` and listen on port 5000 (or the port specified by the `PORT` environment variable). + +## API Reference + +### Key Tool Examples + +#### Contact Management + +##### `intercom_create_contact` + +Create a new contact in your Intercom workspace. + +**Parameters:** + +- `role` (string, optional): "user" or "lead" +- `external_id` (string, optional): Your unique identifier for the contact +- `email` (string, optional): Contact's email address +- `phone` (string, optional): Contact's phone number +- `name` (string, optional): Contact's name +- `signed_up_at` (number, optional): UNIX timestamp of signup +- `custom_attributes` (object, optional): Custom data for the contact + +**Note:** At least one of `email`, `external_id`, or `role` must be provided. + +##### `intercom_search_contacts` + +Search contacts using advanced query filters. + +**Parameters:** + +- `query` (object, required): Search query with field, operator, and value +- `pagination` (object, optional): Pagination options with per_page and starting_after + +**Example query:** +{ +"query": { +"field": "email", +"operator": "=", +"value": "user@example.com" +} +} + + +#### Conversation Management + +##### `intercom_create_conversation` + +Create a conversation initiated by a contact. + +**Parameters:** + +- `from` (object, required): Contact object with type and id +- `body` (string, required): Message content +- `created_at` (number, optional): UNIX timestamp + +##### `intercom_reply_conversation` + +Reply to an existing conversation. + +**Parameters:** + +- `id` (string, required): Conversation ID +- `message_type` (string, optional): "comment", "note", or "quick_reply" +- `type` (string, optional): "admin" or "user" +- `admin_id` (string, optional): ID of replying admin +- `body` (string, optional): Reply content + +#### Company Management + +##### `intercom_create_company` + +Create a new company in your workspace. + +**Parameters:** + +- `name` (string, optional): Company name +- `company_id` (string, optional): Your unique identifier +- `plan` (string, optional): Company's plan/tier +- `size` (number, optional): Number of employees +- `website` (string, optional): Company website URL +- `monthly_spend` (number, optional): Revenue from this company +- `custom_attributes` (object, optional): Custom company data + +**Note:** At least one of `name` or `company_id` must be provided. + +#### Help Center Management + +##### `intercom_create_article` + +Create a new Help Center article. + +**Parameters:** + +- `title` (string, required): Article title +- `author_id` (number, required): ID of the author (must be a teammate) +- `description` (string, optional): Article description +- `body` (string, optional): Article content (HTML supported) +- `state` (string, optional): "published" or "draft" +- `translated_content` (object, optional): Multilingual content + +#### Messaging + +##### `intercom_create_message` + +Send an admin-initiated message. + +**Parameters:** + +- `message_type` (string, required): "in_app" or "email" +- `body` (string, required): Message content +- `from` (object, required): Admin sender object +- `to` (object, required): Contact recipient object +- `subject` (string, required for email): Email subject line +- `template` (string, required for email): "plain" or "personal" + +## Authentication + +The server supports two methods of authentication: + +1. **Environment Variable (Recommended):** Set `INTERCOM_ACCESS_TOKEN` in your `.env` file. +2. **HTTP Header:** Pass the access token as `x-auth-token` header in requests to the MCP server. + +Both Bearer token format (`Bearer dG9r:token123`) and direct token format (`dG9r:token123`) are supported. + +## Protocol Support + +This server supports both MCP protocol versions: + +- **Streamable HTTP Transport** (Protocol Version 2025-03-26) - **Recommended** + - Endpoint: `POST /mcp` + - Single request/response model + - Simpler implementation and better performance + +- **HTTP+SSE Transport** (Protocol Version 2024-11-05) - **Legacy Support** + - Endpoints: `GET /sse`, `POST /messages`, `DELETE /sse/:sessionId` + - Persistent connections with session management + - Server-Sent Events for real-time communication + +### Additional Endpoints + +- `GET /health` - Health check endpoint +- `GET /sse/status` - SSE connection status and monitoring + +## Testing with MCP Clients + +### Claude Desktop + +Add to your MCP configuration: + +{ +"mcpServers": { +"intercom": { +"command": "node", +"args": ["path/to/your/server/dist/index.js"], +"env": { +"INTERCOM_ACCESS_TOKEN": "your_actual_token_here" +} +} +} +} + +text + +### Cursor IDE + +1. Create `.cursor/mcp.json` in your project: +{ +"mcpServers": { +"intercom": { +"url": "http://localhost:5000/mcp", +"headers": { +"x-auth-token": "your_actual_token_here" +} +} +} +} + +text + +2. Test with natural language: +- "Create a new contact with email john@example.com" +- "List all conversations from the last week" +- "Search for companies with more than 100 employees" +- "Create a Help Center article about getting started" +- "Tag all enterprise customers with 'high-value'" + +## Error Handling + +The server includes comprehensive error handling: + +- **Authentication errors**: Invalid or missing tokens, insufficient permissions +- **API errors**: Intercom API rate limits, invalid parameters, resource not found +- **Connection errors**: Network issues and timeouts +- **Validation errors**: Invalid input parameters, missing required fields + +All errors are returned in proper JSON-RPC format with descriptive error messages and appropriate HTTP status codes. + +## Rate Limiting + +The server respects Intercom's API rate limits: +- **Standard rate limit**: 1000 requests per minute +- **Search endpoints**: 333 requests per minute +- **Bulk operations**: Automatic batching to optimize request usage + +## Development + +- **Building:** `npm run build` (compile TypeScript to JavaScript) +- **Development:** `npm run dev` (build and run with file watching) +- **Linting:** `npm run lint` (check code style) +- **Format:** `npm run format` (format code with Prettier) +- **Testing:** `npm test` (run test suite) + +## Architecture + +The server is built with: + +- **Express.js** for HTTP server functionality +- **Model Context Protocol SDK** for MCP implementation +- **AsyncLocalStorage** for request context management +- **TypeScript** for type safety and better development experience +- **Comprehensive validation** for all input parameters +- **Modular handler architecture** for maintainable code organization + +## Project Structure + +src/ +ā”œā”€ā”€ client/ # Intercom API client +│ └── intercomClient.ts +ā”œā”€ā”€ tools/ # MCP tool definitions and handlers +│ ā”œā”€ā”€ definitions/ # Tool schema definitions +│ │ ā”œā”€ā”€ contactTools.ts +│ │ ā”œā”€ā”€ conversationTools.ts +│ │ ā”œā”€ā”€ companyTools.ts +│ │ ā”œā”€ā”€ articleTools.ts +│ │ ā”œā”€ā”€ messageTools.ts +│ │ ā”œā”€ā”€ tagTools.ts +│ │ ā”œā”€ā”€ teamTools.ts +│ │ └── index.ts +│ ā”œā”€ā”€ handlers/ # Tool implementation logic +│ │ ā”œā”€ā”€ contactHandler.ts +│ │ ā”œā”€ā”€ conversationHandler.ts +│ │ ā”œā”€ā”€ companyHandler.ts +│ │ ā”œā”€ā”€ articleHandler.ts +│ │ ā”œā”€ā”€ messageHandler.ts +│ │ ā”œā”€ā”€ tagHandler.ts +│ │ ā”œā”€ā”€ teamHandler.ts +│ │ └── index.ts +│ └── index.ts +ā”œā”€ā”€ transport/ # MCP transport implementations +│ ā”œā”€ā”€ httpTransport.ts +│ └── sseTransport.ts +ā”œā”€ā”€ utils/ # Utility functions +│ ā”œā”€ā”€ validation.ts +│ └── errors.ts +ā”œā”€ā”€ server.ts # MCP server configuration +└── index.ts # Main application entry point + +text + +## Support + +For issues related to this MCP server, please create an issue in the repository. +For Intercom API questions, consult the [Intercom API documentation](https://developers.intercom.com/reference/). diff --git a/mcp_servers/intercom/package-lock.json b/mcp_servers/intercom/package-lock.json new file mode 100644 index 00000000..f4cfeec4 --- /dev/null +++ b/mcp_servers/intercom/package-lock.json @@ -0,0 +1,6454 @@ +{ + "name": "@klavis-ai/mcp-server-intercom", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@klavis-ai/mcp-server-intercom", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.17.1", + "dotenv": "^17.2.1", + "express": "^5.1.0", + "p-queue": "^8.0.1", + "shx": "^0.3.4", + "ws": "^8.18.1" + }, + "bin": { + "intercom-mcp": "dist/index.js" + }, + "devDependencies": { + "@jest/globals": "^29.7.0", + "@types/express": "^5.0.3", + "@types/jest": "^29.5.14", + "@types/node": "^24.2.0", + "@typescript-eslint/eslint-plugin": "^7.0.0", + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "jest": "^29.7.0", + "jest-mock-extended": "^4.0.0-beta1", + "nodemon": "^3.1.10", + "prettier": "^3.1.1", + "ts-jest": "^29.1.1", + "typescript": "^5.9.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.0.tgz", + "integrity": "sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.0.tgz", + "integrity": "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.0", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-module-transforms": "^7.27.3", + "@babel/helpers": "^7.27.6", + "@babel/parser": "^7.28.0", + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.0", + "@babel/types": "^7.28.0", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/generator": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.0.tgz", + "integrity": "sha512-lJjzvrbEeWrhB4P3QBsH7tey117PjLZnDbLiQEKjQ/fNJTjuq4HSqgFA+UNSwZT8D7dxxbnuSBMsa1lrWzKlQg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.0", + "@babel/types": "^7.28.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", + "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.27.2", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", + "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.27.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz", + "integrity": "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", + "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.2", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.2.tgz", + "integrity": "sha512-/V9771t+EgXz62aCcyofnQhGM8DQACbRhvzKFsXKC9QM+5MadF8ZmIm0crDMaz3+o0h0zXfJnd4EhbYbxsrcFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.0.tgz", + "integrity": "sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.27.1.tgz", + "integrity": "sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", + "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", + "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.0.tgz", + "integrity": "sha512-mGe7UK5wWyh0bKRfupsUchrQGqvDbZDbKJw+kcRGSmdHVYrv+ltd0pnpDTVpiTqnaBru9iEvA8pz8W46v0Amwg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.0", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.28.2", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.2.tgz", + "integrity": "sha512-ruv7Ae4J5dUYULmeXw1gmb7rYRz57OWCPM57pHojnLq/3Z1CK2lNSLTCVjxVk1F/TZHwOZZrOWi0ur95BbLxNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz", + "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.3", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", + "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/core": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", + "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/reporters": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^29.7.0", + "jest-config": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-resolve-dependencies": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "jest-watcher": "^29.7.0", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/environment": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", + "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^29.7.0", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", + "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-get-type": "^29.6.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", + "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@sinonjs/fake-timers": "^10.0.2", + "@types/node": "*", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", + "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/types": "^29.6.3", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", + "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^6.0.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "slash": "^3.0.0", + "string-length": "^4.0.1", + "strip-ansi": "^6.0.0", + "v8-to-istanbul": "^9.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/source-map": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", + "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.18", + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", + "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-sequencer": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", + "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", + "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", + "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.12", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.12.tgz", + "integrity": "sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz", + "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.29", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz", + "integrity": "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.17.2", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.2.tgz", + "integrity": "sha512-EFLRNXR/ixpXQWu6/3Cu30ndDFIFNaqUXcTqsGebujeMan9FzhAaFFswLRiFj61rgygDRr8WO1N+UijjgRxX9g==", + "license": "MIT", + "dependencies": { + "ajv": "^6.12.6", + "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "eventsource-parser": "^3.0.0", + "express": "^5.0.1", + "express-rate-limit": "^7.5.0", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.23.8", + "zod-to-json-schema": "^3.24.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", + "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", + "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^3.0.0" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/body-parser": { + "version": "1.19.6", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz", + "integrity": "sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/express": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.3.tgz", + "integrity": "sha512-wGA0NX93b19/dZC1J18tKWVIYWyyF2ZjT9vin/NRu0qzzvfVzWjs04iq2rQ3H65vCTQYlRqs3YHfY7zjdV+9Kw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^5.0.0", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.7.tgz", + "integrity": "sha512-R+33OsgWw7rOhD1emjU7dzCDHucJrgJXMA5PYCzJxVil0dsyx5iBEPHqpPfiKNJQb7lZ1vxwoLR4Z87bBUpeGQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", + "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.5.tgz", + "integrity": "sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "29.5.14", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", + "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.2.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.2.1.tgz", + "integrity": "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.10.0" + } + }, + "node_modules/@types/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/send": { + "version": "0.17.5", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.5.tgz", + "integrity": "sha512-z6F2D3cOStZvuk2SaP6YrwkNO65iTZcwA2ZkSABegdkAh/lf+Aa/YQndZVfmEXT5vgAp6zv06VQ3ejSVjAny4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.8.tgz", + "integrity": "sha512-roei0UY3LhpOJvjbIP6ZZFngyLKl5dskOtDhxY5THRSpO+ZI+nzJ+m5yUMzGrp89YRa7lvknKkMYjqQFGwA7Sg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/@types/stack-utils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/yargs": { + "version": "17.0.33", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.18.0.tgz", + "integrity": "sha512-94EQTWZ40mzBc42ATNIBimBEDltSJ9RQHCC8vc/PDbxi4k8dVwUAv4o98dk50M1zB+JGFxp43FP7f8+FP8R6Sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/type-utils": "7.18.0", + "@typescript-eslint/utils": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "graphemer": "^1.4.0", + "ignore": "^5.3.1", + "natural-compare": "^1.4.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.18.0.tgz", + "integrity": "sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/typescript-estree": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.18.0.tgz", + "integrity": "sha512-jjhdIE/FPF2B7Z1uzc6i3oWKbGcHb87Qw7AWj6jmEqNOfDFbJWtjt/XfwCpvNkpGWlcJaog5vTR+VV8+w9JflA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.18.0.tgz", + "integrity": "sha512-XL0FJXuCLaDuX2sYqZUUSOJ2sG5/i1AAze+axqmLnSkNEVMVYLF+cbwlB2w8D1tinFuSikHmFta+P+HOofrLeA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/typescript-estree": "7.18.0", + "@typescript-eslint/utils": "7.18.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.18.0.tgz", + "integrity": "sha512-iZqi+Ds1y4EDYUtlOOC+aUmxnE9xS/yCigkjA7XpTKV6nCBd3Hp/PRGGmdwnfkV2ThMyYldP1wRpm/id99spTQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.18.0.tgz", + "integrity": "sha512-aP1v/BSPnnyhMHts8cf1qQ6Q1IFwwRvAQGRvBFkWlo3/lH29OXA3Pts+c10nxRxIBrDnoMqzhgdwVe5f2D6OzA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.18.0.tgz", + "integrity": "sha512-kK0/rNa2j74XuHVcoCZxdFBMF+aq/vH83CXAOHieC+2Gis4mF8jJXT5eAfyD3K0sAxtPuwxaIOIOvhwzVDt/kw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/typescript-estree": "7.18.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.18.0.tgz", + "integrity": "sha512-cDF0/Gf81QpY3xYyJKDV14Zwdmid5+uuENhjH2EqFaF0ni+yAyq/LzMaIJdhNJXZI7uLzwIlA+V7oWoyn6Curg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true, + "license": "ISC" + }, + "node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-escapes/node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", + "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/transform": "^29.7.0", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^29.6.3", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", + "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.1.14", + "@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.2.0.tgz", + "integrity": "sha512-E/VlAEzRrsLEb2+dv8yp3bo4scof3l9nR4lrld+Iy5NyVqgVYUJnDAmunkhPMisRI32Qc4iRiz425d8vM++2fg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5" + }, + "peerDependencies": { + "@babel/core": "^7.0.0 || ^8.0.0-0" + } + }, + "node_modules/babel-preset-jest": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", + "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-jest-hoist": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.25.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz", + "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "caniuse-lite": "^1.0.30001726", + "electron-to-chromium": "^1.5.173", + "node-releases": "^2.0.19", + "update-browserslist-db": "^1.1.3" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-json-stable-stringify": "2.x" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001733", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001733.tgz", + "integrity": "sha512-e4QKw/O2Kavj2VQTKZWrwzkt3IxOmIlU6ajRb6LP64LHpBo1J67k2Hi4Vu/TgJWsNtynurfS0uK3MaUTCPfu5Q==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", + "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz", + "integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" + }, + "node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/create-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", + "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "prompts": "^2.0.1" + }, + "bin": { + "create-jest": "bin/create-jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/dedent": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.6.0.tgz", + "integrity": "sha512-F1Z+5UCFpmQUzJa11agbyPVMbpgT/qA3/SKyJ1jyBgm7dUcUEa8v9JwDkerSQXfakBwFljIxhOJqGkjUwZ9FSA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dotenv": { + "version": "17.2.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.1.tgz", + "integrity": "sha512-kQhDYKZecqnM0fCnzI5eIv5L4cAe/iRI+HqMbO/hbRdTAeXDG+M9FjipUxNfbARuEg4iHIbhnhs78BCHNbSxEQ==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.5.199", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.199.tgz", + "integrity": "sha512-3gl0S7zQd88kCAZRO/DnxtBKuhMO4h0EaQIN3YgZfV6+pW+5+bf2AdQeHNESCoaQqo/gjGVYEf2YM4O5HJQqpQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/emittery": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", + "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-config-prettier": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.2.tgz", + "integrity": "sha512-iI1f+D2ViGn+uvv5HuHVUamg8ll4tN+JRHGc6IJi4TP9Kl976C57fzPXgseXNs8v0iA8aSJpHsTWjDb9QJamGQ==", + "dev": true, + "license": "MIT", + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventemitter3": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", + "license": "MIT" + }, + "node_modules/eventsource": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", + "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", + "license": "MIT", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.3.tgz", + "integrity": "sha512-nVpZkTMM9rF6AQ9gPJpFsNAMt48wIzB5TQgiTLdHiuO8XEDhUgZEhqKlZWXbIzo9VmJ/HvysHqEaVeD5v9TPvA==", + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/expect-utils": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/express": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "license": "MIT", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.0", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.1.tgz", + "integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==", + "license": "MIT", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": ">= 4.11" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true, + "license": "MIT" + }, + "node_modules/handlebars": { + "version": "4.7.8", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", + "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.5", + "neo-async": "^2.6.2", + "source-map": "^0.6.1", + "wordwrap": "^1.0.0" + }, + "bin": { + "handlebars": "bin/handlebars" + }, + "engines": { + "node": ">=0.4.7" + }, + "optionalDependencies": { + "uglify-js": "^3.1.4" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true, + "license": "MIT" + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-errors/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/ignore-by-default": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz", + "integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==", + "dev": true, + "license": "ISC" + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-local": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", + "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/interpret": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", + "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.23.9", + "@babel/parser": "^7.23.9", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", + "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/types": "^29.6.3", + "import-local": "^3.0.2", + "jest-cli": "^29.7.0" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", + "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "execa": "^5.0.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-circus": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", + "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^1.0.0", + "is-generator-fn": "^2.0.0", + "jest-each": "^29.7.0", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0", + "pretty-format": "^29.7.0", + "pure-rand": "^6.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-cli": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", + "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "create-jest": "^29.7.0", + "exit": "^0.1.2", + "import-local": "^3.0.2", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "yargs": "^17.3.1" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-config": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", + "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/test-sequencer": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-jest": "^29.7.0", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-circus": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-diff": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", + "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^29.6.3", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-docblock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", + "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "detect-newline": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-each": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", + "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "jest-util": "^29.7.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-environment-node": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", + "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", + "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-haste-map": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", + "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/graceful-fs": "^4.1.3", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + "node_modules/jest-leak-detector": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", + "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", + "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", + "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^29.6.3", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", + "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock-extended": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jest-mock-extended/-/jest-mock-extended-4.0.0.tgz", + "integrity": "sha512-7BZpfuvLam+/HC+NxifIi9b+5VXj/utUDMPUqrDJehGWVuXPtLS9Jqlob2mJLrI/pg2k1S8DMfKDvEB88QNjaQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ts-essentials": "^10.0.2" + }, + "peerDependencies": { + "@jest/globals": "^28.0.0 || ^29.0.0 || ^30.0.0", + "jest": "^24.0.0 || ^25.0.0 || ^26.0.0 || ^27.0.0 || ^28.0.0 || ^29.0.0 || ^30.0.0", + "typescript": "^3.0.0 || ^4.0.0 || ^5.0.0" + } + }, + "node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", + "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", + "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "resolve": "^1.20.0", + "resolve.exports": "^2.0.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve-dependencies": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", + "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-regex-util": "^29.6.3", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runner": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", + "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/environment": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-leak-detector": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-resolve": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-util": "^29.7.0", + "jest-watcher": "^29.7.0", + "jest-worker": "^29.7.0", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runtime": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", + "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/globals": "^29.7.0", + "@jest/source-map": "^29.6.3", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", + "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-jsx": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/types": "^7.3.3", + "@jest/expect-utils": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "natural-compare": "^1.4.0", + "pretty-format": "^29.7.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", + "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", + "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "leven": "^3.1.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-watcher": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", + "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "jest-util": "^29.7.0", + "string-length": "^4.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", + "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "jest-util": "^29.7.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "tmpl": "1.0.5" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-releases": { + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", + "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", + "dev": true, + "license": "MIT" + }, + "node_modules/nodemon": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.10.tgz", + "integrity": "sha512-WDjw3pJ0/0jMFmyNDp3gvY2YizjLmmOUQo6DEBY+JgdvW/yQ9mEeSw6H5ythl5Ny2ytb7f9C2nIbjSxMNzbJXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "chokidar": "^3.5.2", + "debug": "^4", + "ignore-by-default": "^1.0.1", + "minimatch": "^3.1.2", + "pstree.remy": "^1.1.8", + "semver": "^7.5.3", + "simple-update-notifier": "^2.0.0", + "supports-color": "^5.5.0", + "touch": "^3.1.0", + "undefsafe": "^2.0.5" + }, + "bin": { + "nodemon": "bin/nodemon.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/nodemon" + } + }, + "node_modules/nodemon/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/nodemon/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/nodemon/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/nodemon/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-queue": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-8.1.0.tgz", + "integrity": "sha512-mxLDbbGIBEXTJL0zEx8JIylaj3xQ7Z/7eEVjcF9fJX4DBiH9oqe+oahYnlKKxm0Ci9TlWTyhSHgygxMxjIB2jw==", + "license": "MIT", + "dependencies": { + "eventemitter3": "^5.0.1", + "p-timeout": "^6.1.2" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-timeout": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-6.1.4.tgz", + "integrity": "sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg==", + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "license": "MIT" + }, + "node_modules/path-to-regexp": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "license": "MIT", + "engines": { + "node": ">=16" + } + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkce-challenge": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", + "license": "MIT", + "engines": { + "node": ">=16.20.0" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz", + "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/pstree.remy": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", + "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==", + "dev": true, + "license": "MIT" + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/pure-rand": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", + "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, + "node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", + "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.6.3", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true, + "license": "MIT" + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/rechoir": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", + "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==", + "dependencies": { + "resolve": "^1.1.6" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-cwd/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve.exports": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz", + "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + "path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/serve-static": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shelljs": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", + "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", + "license": "BSD-3-Clause", + "dependencies": { + "glob": "^7.0.0", + "interpret": "^1.0.0", + "rechoir": "^0.6.2" + }, + "bin": { + "shjs": "bin/shjs" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/shx": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/shx/-/shx-0.3.4.tgz", + "integrity": "sha512-N6A9MLVqjxZYcVn8hLmtneQWIJtp8IKzMP4eMnx+nqkvXoqinUPCbUFLp2UcWTEIUONhlk0ewxr/jaVGlc+J+g==", + "license": "MIT", + "dependencies": { + "minimist": "^1.2.3", + "shelljs": "^0.8.5" + }, + "bin": { + "shx": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/simple-update-notifier": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", + "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true, + "license": "MIT" + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/stack-utils/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/test-exclude/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/test-exclude/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true, + "license": "MIT" + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/touch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.1.tgz", + "integrity": "sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==", + "dev": true, + "license": "ISC", + "bin": { + "nodetouch": "bin/nodetouch.js" + } + }, + "node_modules/ts-api-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.4.3.tgz", + "integrity": "sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, + "node_modules/ts-essentials": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-10.1.1.tgz", + "integrity": "sha512-4aTB7KLHKmUvkjNj8V+EdnmuVTiECzn3K+zIbRthumvHu+j44x3w63xpfs0JL3NGIzGXqoQ7AV591xHO+XrOTw==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "typescript": ">=4.5.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/ts-jest": { + "version": "29.4.1", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.4.1.tgz", + "integrity": "sha512-SaeUtjfpg9Uqu8IbeDKtdaS0g8lS6FT6OzM3ezrDfErPJPHNDo/Ey+VFGP1bQIDfagYDLyRpd7O15XpG1Es2Uw==", + "dev": true, + "license": "MIT", + "dependencies": { + "bs-logger": "^0.2.6", + "fast-json-stable-stringify": "^2.1.0", + "handlebars": "^4.7.8", + "json5": "^2.2.3", + "lodash.memoize": "^4.1.2", + "make-error": "^1.3.6", + "semver": "^7.7.2", + "type-fest": "^4.41.0", + "yargs-parser": "^21.1.1" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0" + }, + "peerDependencies": { + "@babel/core": ">=7.0.0-beta.0 <8", + "@jest/transform": "^29.0.0 || ^30.0.0", + "@jest/types": "^29.0.0 || ^30.0.0", + "babel-jest": "^29.0.0 || ^30.0.0", + "jest": "^29.0.0 || ^30.0.0", + "jest-util": "^29.0.0 || ^30.0.0", + "typescript": ">=4.3 <6" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@jest/transform": { + "optional": true + }, + "@jest/types": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "jest-util": { + "optional": true + } + } + }, + "node_modules/ts-jest/node_modules/type-fest": { + "version": "4.41.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", + "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.9.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz", + "integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/uglify-js": { + "version": "3.19.3", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", + "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", + "dev": true, + "license": "BSD-2-Clause", + "optional": true, + "bin": { + "uglifyjs": "bin/uglifyjs" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/undefsafe": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz", + "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==", + "dev": true, + "license": "MIT" + }, + "node_modules/undici-types": { + "version": "7.10.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz", + "integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==", + "dev": true, + "license": "MIT" + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", + "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/v8-to-istanbul": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", + "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", + "dev": true, + "license": "ISC", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "makeerror": "1.0.12" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, + "node_modules/write-file-atomic": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/ws": { + "version": "8.18.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", + "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zod": { + "version": "3.25.76", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", + "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.24.6", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz", + "integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.24.1" + } + } + } +} diff --git a/mcp_servers/intercom/package.json b/mcp_servers/intercom/package.json new file mode 100644 index 00000000..d2ac0d7c --- /dev/null +++ b/mcp_servers/intercom/package.json @@ -0,0 +1,70 @@ +{ + "name": "@klavis-ai/mcp-server-intercom", + "version": "1.0.0", + "description": "MCP server for Intercom customer messaging platform integration.", + "type": "module", + "bin": { + "intercom-mcp": "dist/index.js" + }, + "files": [ + "dist" + ], + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "tsc && shx chmod +x dist/index.js", + "test": "node --experimental-vm-modules node_modules/jest/bin/jest.js", + "start": "node dist/index.js", + "dev": "tsc && node dist/index.js", + "clean": "shx rm -rf dist", + "prebuild": "npm run clean", + "lint": "eslint src/**/*.ts", + "lint:fix": "eslint src/**/*.ts --fix", + "format": "prettier --write .", + "format:check": "prettier --check .", + "format:src": "prettier --write src/**/*.{ts,js,json}", + "format:config": "prettier --write *.{json,js,ts,md}", + "prepare": "npm run build", + "publish": "npm run build && npm publish", + "docker:build": "docker build -t intercom-mcp-server .", + "docker:run": "docker run -p 5000:5000 --env-file .env intercom-mcp-server" + }, + "license": "MIT", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.17.1", + "dotenv": "^17.2.1", + "express": "^5.1.0", + "p-queue": "^8.0.1", + "shx": "^0.3.4", + "ws": "^8.18.1" + }, + "devDependencies": { + "@jest/globals": "^29.7.0", + "@types/express": "^5.0.3", + "@types/jest": "^29.5.14", + "@types/node": "^24.2.0", + "@typescript-eslint/eslint-plugin": "^7.0.0", + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "jest": "^29.7.0", + "jest-mock-extended": "^4.0.0-beta1", + "nodemon": "^3.1.10", + "prettier": "^3.1.1", + "ts-jest": "^29.1.1", + "typescript": "^5.9.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "keywords": [ + "mcp", + "intercom", + "customer-messaging", + "conversations", + "support", + "api-integration" + ], + "author": "Klavis AI" +} diff --git a/mcp_servers/intercom/src/client/intercomClient.ts b/mcp_servers/intercom/src/client/intercomClient.ts new file mode 100644 index 00000000..9878d8ca --- /dev/null +++ b/mcp_servers/intercom/src/client/intercomClient.ts @@ -0,0 +1,73 @@ +import { Server } from '@modelcontextprotocol/sdk/server/index.js'; +import { AsyncLocalStorage } from 'async_hooks'; +import dotenv from 'dotenv'; + +dotenv.config(); + +const INTERCOM_API_URL = 'https://api.intercom.io/'; + +const asyncLocalStorage = new AsyncLocalStorage<{ + intercomClient: IntercomClient; +}>(); + +let mcpServerInstance: Server | null = null; + +export class IntercomClient { + private accessToken: string; + private baseUrl: string; + + constructor(accessToken: string, baseUrl: string = INTERCOM_API_URL) { + this.accessToken = accessToken; + this.baseUrl = baseUrl; + } + + public async makeRequest(endpoint: string, options: RequestInit = {}): Promise { + const url = `${this.baseUrl}${endpoint}`; + const headers = { + Authorization: `Bearer ${this.accessToken}`, + 'Content-Type': 'application/json', + Accept: 'application/json', + 'Intercom-Version': '2.11', + ...options.headers, + }; + + const response = await fetch(url, { + ...options, + headers, + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error( + `Intercom API error: ${response.status} ${response.statusText} - ${errorText}`, + ); + } + + return response.json(); + } +} + +function getIntercomClient() { + const store = asyncLocalStorage.getStore(); + if (!store || !store.intercomClient) { + throw new Error('Store not found in AsyncLocalStorage'); + } + if (!store.intercomClient) { + throw new Error('Intercom client not found in AsyncLocalStorage'); + } + return store.intercomClient; +} + +function safeLog( + level: 'error' | 'debug' | 'info' | 'notice' | 'warning' | 'critical' | 'alert' | 'emergency', + data: any, +): void { + try { + const logData = typeof data === 'object' ? JSON.stringify(data, null, 2) : data; + console.log(`[${level.toUpperCase()}] ${logData}`); + } catch (error) { + console.log(`[${level.toUpperCase()}] [LOG_ERROR] Could not serialize log data`); + } +} + +export { getIntercomClient, safeLog, asyncLocalStorage, mcpServerInstance }; diff --git a/mcp_servers/intercom/src/index.ts b/mcp_servers/intercom/src/index.ts new file mode 100644 index 00000000..fcd5cea5 --- /dev/null +++ b/mcp_servers/intercom/src/index.ts @@ -0,0 +1,28 @@ +#!/usr/bin/env node +import express from 'express'; +import dotenv from 'dotenv'; +import { HttpTransport } from './transport/httpTransport.js'; +import { SSETransport } from './transport/sseTransport.js'; + +dotenv.config(); + +const app = express(); +const PORT = process.env.PORT || 5000; + +const httpTransport = new HttpTransport(); +const sseTransport = new SSETransport(); + +app.use('/', httpTransport.getRouter()); +app.use('/', sseTransport.getRouter()); + +app.get('/health', (_req, res) => { + res.json({ + status: 'healthy', + timestamp: new Date().toISOString(), + version: '1.0.0', + }); +}); + +app.listen(PORT, () => { + console.log(`Intercom MCP server running on port ${PORT}`); +}); diff --git a/mcp_servers/intercom/src/server.ts b/mcp_servers/intercom/src/server.ts new file mode 100644 index 00000000..e34c034e --- /dev/null +++ b/mcp_servers/intercom/src/server.ts @@ -0,0 +1,783 @@ +import { Server } from '@modelcontextprotocol/sdk/server/index.js'; +import { CallToolRequestSchema, ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js'; +import { getIntercomClient, safeLog } from './client/intercomClient.js'; + +import { + CONTACT_TOOLS, + CONVERSATION_TOOLS, + COMPANY_TOOLS, + ARTICLE_TOOLS, + MESSAGE_TOOLS, + TAG_TOOLS, + TEAM_TOOLS, + createHandlers, +} from './tools/index.js'; + +let mcpServerInstance: Server | null = null; + +export const getIntercomMcpServer = () => { + if (!mcpServerInstance) { + mcpServerInstance = new Server( + { + name: 'intercom-mcp-server', + version: '1.0.0', + }, + { + capabilities: { + tools: {}, + }, + }, + ); + + mcpServerInstance.setRequestHandler(ListToolsRequestSchema, async () => { + return { + tools: [ + ...CONTACT_TOOLS, + ...CONVERSATION_TOOLS, + ...COMPANY_TOOLS, + ...ARTICLE_TOOLS, + ...MESSAGE_TOOLS, + ...TAG_TOOLS, + ...TEAM_TOOLS, + ], + }; + }); + + mcpServerInstance.setRequestHandler(CallToolRequestSchema, async (request) => { + const { name, arguments: args } = request.params; + + try { + const intercomClient = getIntercomClient(); + const handlers = createHandlers(intercomClient); + + switch (name) { + // ================== CONTACTS ================== + case 'intercom_list_contacts': { + const result = await handlers.contact.listContacts({ + startingAfter: (args as any)?.starting_after, + perPage: (args as any)?.per_page, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_get_contact': { + const result = await handlers.contact.getContact((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_create_contact': { + const result = await handlers.contact.createContact({ + role: (args as any)?.role, + externalId: (args as any)?.external_id, + email: (args as any)?.email, + phone: (args as any)?.phone, + name: (args as any)?.name, + avatar: (args as any)?.avatar, + signedUpAt: (args as any)?.signed_up_at, + lastSeenAt: (args as any)?.last_seen_at, + ownerId: (args as any)?.owner_id, + unsubscribedFromEmails: (args as any)?.unsubscribed_from_emails, + customAttributes: (args as any)?.custom_attributes, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_update_contact': { + const result = await handlers.contact.updateContact((args as any)?.id, { + role: (args as any)?.role, + externalId: (args as any)?.external_id, + email: (args as any)?.email, + phone: (args as any)?.phone, + name: (args as any)?.name, + avatar: (args as any)?.avatar, + signedUpAt: (args as any)?.signed_up_at, + lastSeenAt: (args as any)?.last_seen_at, + ownerId: (args as any)?.owner_id, + unsubscribedFromEmails: (args as any)?.unsubscribed_from_emails, + customAttributes: (args as any)?.custom_attributes, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_delete_contact': { + const result = await handlers.contact.deleteContact((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_search_contacts': { + const result = await handlers.contact.searchContacts({ + query: (args as any)?.query, + pagination: (args as any)?.pagination, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_merge_contact': { + const result = await handlers.contact.mergeContact( + (args as any)?.from, + (args as any)?.into, + ); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_list_contact_notes': { + const result = await handlers.contact.listContactNotes((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_create_contact_note': { + const result = await handlers.contact.createContactNote((args as any)?.id, { + body: (args as any)?.body, + adminId: (args as any)?.admin_id, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_list_contact_tags': { + const result = await handlers.contact.listContactTags((args as any)?.contact_id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_add_contact_tag': { + const result = await handlers.contact.addContactTag( + (args as any)?.contact_id, + (args as any)?.id, + ); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_remove_contact_tag': { + const result = await handlers.contact.removeContactTag( + (args as any)?.contact_id, + (args as any)?.id, + ); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + // ================== CONVERSATIONS ================== + case 'intercom_list_conversations': { + const result = await handlers.conversation.listConversations({ + startingAfter: (args as any)?.starting_after, + perPage: (args as any)?.per_page, + displayAs: (args as any)?.display_as, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_get_conversation': { + const result = await handlers.conversation.getConversation( + (args as any)?.id, + (args as any)?.display_as, + ); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_create_conversation': { + const result = await handlers.conversation.createConversation({ + from: (args as any)?.from, + body: (args as any)?.body, + createdAt: (args as any)?.created_at, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_update_conversation': { + const result = await handlers.conversation.updateConversation((args as any)?.id, { + displayAs: (args as any)?.display_as, + read: (args as any)?.read, + title: (args as any)?.title, + customAttributes: (args as any)?.custom_attributes, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_delete_conversation': { + const result = await handlers.conversation.deleteConversation((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_search_conversations': { + const result = await handlers.conversation.searchConversations({ + query: (args as any)?.query, + pagination: (args as any)?.pagination, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_reply_conversation': { + const result = await handlers.conversation.replyToConversation((args as any)?.id, { + messageType: (args as any)?.message_type, + type: (args as any)?.type, + adminId: (args as any)?.admin_id, + intercomUserId: (args as any)?.intercom_user_id, + body: (args as any)?.body, + attachmentUrls: (args as any)?.attachment_urls, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_manage_conversation': { + const result = await handlers.conversation.manageConversation((args as any)?.id, { + messageType: (args as any)?.message_type, + adminId: (args as any)?.admin_id, + assigneeId: (args as any)?.assignee_id, + type: (args as any)?.type, + body: (args as any)?.body, + snoozedUntil: (args as any)?.snoozed_until, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_attach_contact_to_conversation': { + const result = await handlers.conversation.attachContactToConversation( + (args as any)?.id, + { + adminId: (args as any)?.admin_id, + customer: (args as any)?.customer, + }, + ); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_detach_contact_from_conversation': { + const result = await handlers.conversation.detachContactFromConversation( + (args as any)?.conversation_id, + (args as any)?.contact_id, + { + adminId: (args as any)?.admin_id, + customer: (args as any)?.customer, + }, + ); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_redact_conversation': { + const result = await handlers.conversation.redactConversation({ + type: (args as any)?.type, + conversationId: (args as any)?.conversation_id, + conversationPartId: (args as any)?.conversation_part_id, + sourceId: (args as any)?.source_id, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_convert_conversation_to_ticket': { + const result = await handlers.conversation.convertConversationToTicket( + (args as any)?.id, + { + ticketTypeId: (args as any)?.ticket_type_id, + attributes: (args as any)?.attributes, + }, + ); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + // ================== COMPANIES ================== + case 'intercom_list_companies': { + const result = await handlers.company.listCompanies({ + startingAfter: (args as any)?.starting_after, + perPage: (args as any)?.per_page, + order: (args as any)?.order, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_get_company': { + const result = await handlers.company.getCompany((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_create_company': { + const result = await handlers.company.createCompany({ + name: (args as any)?.name, + companyId: (args as any)?.company_id, + plan: (args as any)?.plan, + size: (args as any)?.size, + website: (args as any)?.website, + industry: (args as any)?.industry, + remoteCreatedAt: (args as any)?.remote_created_at, + monthlySpend: (args as any)?.monthly_spend, + customAttributes: (args as any)?.custom_attributes, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_update_company': { + const result = await handlers.company.updateCompany((args as any)?.id, { + name: (args as any)?.name, + plan: (args as any)?.plan, + size: (args as any)?.size, + website: (args as any)?.website, + industry: (args as any)?.industry, + remoteCreatedAt: (args as any)?.remote_created_at, + monthlySpend: (args as any)?.monthly_spend, + customAttributes: (args as any)?.custom_attributes, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_delete_company': { + const result = await handlers.company.deleteCompany((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_find_company': { + const result = await handlers.company.findCompany((args as any)?.company_id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_list_company_users': { + const result = await handlers.company.listCompanyUsers((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_attach_contact_to_company': { + const result = await handlers.company.attachContactToCompany( + (args as any)?.id, + (args as any)?.contact_id, + ); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_detach_contact_from_company': { + const result = await handlers.company.detachContactFromCompany( + (args as any)?.id, + (args as any)?.contact_id, + ); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_list_company_segments': { + const result = await handlers.company.listCompanySegments((args as any)?.company_id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_list_company_tags': { + const result = await handlers.company.listCompanyTags((args as any)?.company_id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_tag_company': { + const result = await handlers.company.tagCompany({ + name: (args as any)?.name, + companies: (args as any)?.companies, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_untag_company': { + const result = await handlers.company.untagCompany({ + name: (args as any)?.name, + companies: (args as any)?.companies, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + // ================== ARTICLES ================== + case 'intercom_list_articles': { + const result = await handlers.article.listArticles({ + startingAfter: (args as any)?.starting_after, + perPage: (args as any)?.per_page, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_get_article': { + const result = await handlers.article.getArticle((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_create_article': { + const result = await handlers.article.createArticle({ + title: (args as any)?.title, + description: (args as any)?.description, + body: (args as any)?.body, + authorId: (args as any)?.author_id, + state: (args as any)?.state, + parentId: (args as any)?.parent_id, + parentType: (args as any)?.parent_type, + translatedContent: (args as any)?.translated_content, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_update_article': { + const result = await handlers.article.updateArticle((args as any)?.id, { + title: (args as any)?.title, + description: (args as any)?.description, + body: (args as any)?.body, + authorId: (args as any)?.author_id, + state: (args as any)?.state, + parentId: (args as any)?.parent_id, + parentType: (args as any)?.parent_type, + translatedContent: (args as any)?.translated_content, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_delete_article': { + const result = await handlers.article.deleteArticle((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_search_articles': { + const result = await handlers.article.searchArticles({ + phrase: (args as any)?.phrase, + state: (args as any)?.state, + authorId: (args as any)?.author_id, + parentId: (args as any)?.parent_id, + parentType: (args as any)?.parent_type, + startingAfter: (args as any)?.starting_after, + perPage: (args as any)?.per_page, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_list_collections': { + const result = await handlers.article.listCollections({ + startingAfter: (args as any)?.starting_after, + perPage: (args as any)?.per_page, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_get_collection': { + const result = await handlers.article.getCollection((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_create_collection': { + const result = await handlers.article.createCollection({ + name: (args as any)?.name, + description: (args as any)?.description, + parentId: (args as any)?.parent_id, + helpCenterId: (args as any)?.help_center_id, + translatedContent: (args as any)?.translated_content, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_update_collection': { + const result = await handlers.article.updateCollection((args as any)?.id, { + name: (args as any)?.name, + description: (args as any)?.description, + parentId: (args as any)?.parent_id, + translatedContent: (args as any)?.translated_content, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_delete_collection': { + const result = await handlers.article.deleteCollection((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + // ================== MESSAGES ================== + case 'intercom_create_message': { + const result = await handlers.message.createMessage({ + messageType: (args as any)?.message_type, + subject: (args as any)?.subject, + body: (args as any)?.body, + template: (args as any)?.template, + from: (args as any)?.from, + to: (args as any)?.to, + createdAt: (args as any)?.created_at, + createConversationWithoutContactReply: (args as any) + ?.create_conversation_without_contact_reply, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_list_messages': { + const result = await handlers.message.listMessages({ + startingAfter: (args as any)?.starting_after, + perPage: (args as any)?.per_page, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_get_message': { + const result = await handlers.message.getMessage((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_create_note': { + const result = await handlers.message.createNote({ + body: (args as any)?.body, + contactId: (args as any)?.contact_id, + adminId: (args as any)?.admin_id, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_list_notes': { + const result = await handlers.message.listNotes({ + contactId: (args as any)?.contact_id, + startingAfter: (args as any)?.starting_after, + perPage: (args as any)?.per_page, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_get_note': { + const result = await handlers.message.getNote((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_send_user_message': { + const result = await handlers.message.sendUserMessage({ + from: (args as any)?.from, + body: (args as any)?.body, + createdAt: (args as any)?.created_at, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + // ================== TAGS ================== + case 'intercom_list_tags': { + const result = await handlers.tag.listTags(); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_get_tag': { + const result = await handlers.tag.getTag((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_create_or_update_tag': { + const result = await handlers.tag.createOrUpdateTag({ + name: (args as any)?.name, + id: (args as any)?.id, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_tag_companies': { + const result = await handlers.tag.tagCompanies({ + name: (args as any)?.name, + companies: (args as any)?.companies, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_untag_companies': { + const result = await handlers.tag.untagCompanies({ + name: (args as any)?.name, + companies: (args as any)?.companies, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_tag_users': { + const result = await handlers.tag.tagUsers({ + name: (args as any)?.name, + users: (args as any)?.users, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_delete_tag': { + const result = await handlers.tag.deleteTag((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + // ================== TEAMS ================== + case 'intercom_list_teams': { + const result = await handlers.team.listTeams(); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_get_team': { + const result = await handlers.team.getTeam((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_list_admins': { + const result = await handlers.team.listAdmins(); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_get_admin': { + const result = await handlers.team.getAdmin((args as any)?.id); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_get_current_admin': { + const result = await handlers.team.getCurrentAdmin(); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_set_admin_away': { + const result = await handlers.team.setAdminAway((args as any)?.id, { + awayModeEnabled: (args as any)?.away_mode_enabled, + awayModeReassign: (args as any)?.away_mode_reassign, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + case 'intercom_list_admin_activity_logs': { + const result = await handlers.team.listAdminActivityLogs({ + createdAtAfter: (args as any)?.created_at_after, + createdAtBefore: (args as any)?.created_at_before, + }); + return { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }; + } + + default: + throw new Error(`Unknown tool: ${name}`); + } + } catch (error: any) { + safeLog('error', `Tool ${name} failed: ${error.message}`); + return { + content: [ + { + type: 'text', + text: `Error: ${error.message}`, + }, + ], + isError: true, + }; + } + }); + } + + return mcpServerInstance; +}; diff --git a/mcp_servers/intercom/src/tools/definitions/articleTools.ts b/mcp_servers/intercom/src/tools/definitions/articleTools.ts new file mode 100644 index 00000000..26e658da --- /dev/null +++ b/mcp_servers/intercom/src/tools/definitions/articleTools.ts @@ -0,0 +1,501 @@ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; + +/** + * List all articles + */ +const LIST_ARTICLES_TOOL: Tool = { + name: 'intercom_list_articles', + description: 'List all articles in your Help Center with pagination support.', + inputSchema: { + type: 'object', + properties: { + starting_after: { + type: 'string', + description: 'The cursor to use in pagination for retrieving the next page of results', + }, + per_page: { + type: 'integer', + description: 'Number of results per page (default: 50, max: 150)', + minimum: 1, + maximum: 150, + default: 50, + }, + }, + required: [], + }, +}; + +/** + * Get a specific article by ID + */ +const GET_ARTICLE_TOOL: Tool = { + name: 'intercom_get_article', + description: 'Retrieve a specific article by its ID.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'integer', + description: 'The unique identifier for the article which is given by Intercom', + example: 123, + }, + }, + required: ['id'], + }, +}; + +/** + * Create a new article + */ +const CREATE_ARTICLE_TOOL: Tool = { + name: 'intercom_create_article', + description: 'Create a new article in your Help Center.', + inputSchema: { + type: 'object', + properties: { + title: { + type: 'string', + description: + "The title of the article. For multilingual articles, this will be the title of the default language's content", + example: 'Thanks for everything', + }, + description: { + type: 'string', + description: + "The description of the article. For multilingual articles, this will be the description of the default language's content", + example: 'Description of the Article', + }, + body: { + type: 'string', + description: + "The content of the article. For multilingual articles, this will be the body of the default language's content", + example: '

Body of the Article

', + }, + author_id: { + type: 'integer', + description: + "The id of the author of the article. For multilingual articles, this will be the id of the author of the default language's content. Must be a teammate on the help center's workspace", + example: 1295, + }, + state: { + type: 'string', + description: + "Whether the article will be published or will be a draft. Defaults to draft. For multilingual articles, this will be the state of the default language's content", + enum: ['published', 'draft'], + default: 'draft', + example: 'published', + }, + parent_id: { + type: 'integer', + description: + "The id of the article's parent collection or section. An article without this field stands alone", + example: 18, + }, + parent_type: { + type: 'string', + description: 'The type of parent, which can either be a collection or section', + enum: ['collection', 'section'], + example: 'collection', + }, + translated_content: { + type: 'object', + description: + 'The translated content of the article. The keys are the locale codes and the values are the translated content of the article', + additionalProperties: { + type: 'object', + properties: { + title: { + type: 'string', + description: 'The title of the article in the specified language', + }, + description: { + type: 'string', + description: 'The description of the article in the specified language', + }, + body: { + type: 'string', + description: 'The content of the article in the specified language', + }, + author_id: { + type: 'integer', + description: 'The id of the author for this language version', + }, + state: { + type: 'string', + enum: ['published', 'draft'], + description: 'The state of the article in the specified language', + }, + }, + required: ['title', 'author_id', 'state'], + }, + example: { + fr: { + title: 'Merci pour tout', + description: "Description de l'article", + body: "Corps de l'article", + author_id: 991266252, + state: 'published', + }, + }, + }, + }, + required: ['title', 'author_id'], + }, +}; + +/** + * Update an existing article + */ +const UPDATE_ARTICLE_TOOL: Tool = { + name: 'intercom_update_article', + description: 'Update an existing article in your Help Center.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'integer', + description: 'The unique identifier for the article which is given by Intercom', + example: 123, + }, + title: { + type: 'string', + description: + "The title of the article. For multilingual articles, this will be the title of the default language's content", + example: 'Updated title', + }, + description: { + type: 'string', + description: + "The description of the article. For multilingual articles, this will be the description of the default language's content", + example: 'Updated description', + }, + body: { + type: 'string', + description: + "The content of the article. For multilingual articles, this will be the body of the default language's content", + example: '

Updated body content

', + }, + author_id: { + type: 'integer', + description: + "The id of the author of the article. For multilingual articles, this will be the id of the author of the default language's content. Must be a teammate on the help center's workspace", + example: 1295, + }, + state: { + type: 'string', + description: + "Whether the article will be published or will be a draft. For multilingual articles, this will be the state of the default language's content", + enum: ['published', 'draft'], + example: 'published', + }, + parent_id: { + type: 'string', + description: + "The id of the article's parent collection or section. An article without this field stands alone", + example: '18', + }, + parent_type: { + type: 'string', + description: 'The type of parent, which can either be a collection or section', + enum: ['collection', 'section'], + example: 'collection', + }, + translated_content: { + type: 'object', + description: + 'The translated content of the article. The keys are the locale codes and the values are the translated content of the article', + additionalProperties: { + type: 'object', + properties: { + title: { + type: 'string', + description: 'The title of the article in the specified language', + }, + description: { + type: 'string', + description: 'The description of the article in the specified language', + }, + body: { + type: 'string', + description: 'The content of the article in the specified language', + }, + author_id: { + type: 'integer', + description: 'The id of the author for this language version', + }, + state: { + type: 'string', + enum: ['published', 'draft'], + description: 'The state of the article in the specified language', + }, + }, + }, + }, + }, + required: ['id'], + }, +}; + +/** + * Delete an article + */ +const DELETE_ARTICLE_TOOL: Tool = { + name: 'intercom_delete_article', + description: 'Delete a single article from your Help Center.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'integer', + description: 'The unique identifier for the article which is given by Intercom', + example: 123, + }, + }, + required: ['id'], + }, +}; + +/** + * Search articles + */ +const SEARCH_ARTICLES_TOOL: Tool = { + name: 'intercom_search_articles', + description: 'Search articles in your Help Center using query filters.', + inputSchema: { + type: 'object', + properties: { + phrase: { + type: 'string', + description: 'The search phrase to look for in articles', + example: 'getting started', + }, + state: { + type: 'string', + description: 'Filter articles by their publication state', + enum: ['published', 'draft'], + example: 'published', + }, + author_id: { + type: 'integer', + description: 'Filter articles by author ID', + example: 1295, + }, + parent_id: { + type: 'integer', + description: 'Filter articles by parent collection or section ID', + example: 18, + }, + parent_type: { + type: 'string', + description: 'Filter articles by parent type', + enum: ['collection', 'section'], + example: 'collection', + }, + starting_after: { + type: 'string', + description: 'The cursor to use in pagination for retrieving the next page of results', + }, + per_page: { + type: 'integer', + description: 'Number of results per page (default: 50, max: 150)', + minimum: 1, + maximum: 150, + default: 50, + }, + }, + required: [], + }, +}; + +/** + * List help center collections + */ +const LIST_COLLECTIONS_TOOL: Tool = { + name: 'intercom_list_collections', + description: 'List all Help Center collections with pagination support.', + inputSchema: { + type: 'object', + properties: { + starting_after: { + type: 'string', + description: 'The cursor to use in pagination for retrieving the next page of results', + }, + per_page: { + type: 'integer', + description: 'Number of results per page (default: 50, max: 150)', + minimum: 1, + maximum: 150, + default: 50, + }, + }, + required: [], + }, +}; + +/** + * Get a specific collection by ID + */ +const GET_COLLECTION_TOOL: Tool = { + name: 'intercom_get_collection', + description: 'Retrieve a specific Help Center collection by its ID.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the collection which is given by Intercom', + example: '123', + }, + }, + required: ['id'], + }, +}; + +/** + * Create a new collection + */ +const CREATE_COLLECTION_TOOL: Tool = { + name: 'intercom_create_collection', + description: 'Create a new Help Center collection.', + inputSchema: { + type: 'object', + properties: { + name: { + type: 'string', + description: + "The name of the collection. For multilingual collections, this will be the name of the default language's content", + example: 'Getting Started', + }, + description: { + type: 'string', + description: + "The description of the collection. For multilingual collections, this will be the description of the default language's content", + example: 'Collection for getting started articles', + }, + parent_id: { + type: 'string', + description: + 'The id of the parent collection. If null then it will be created as the first level collection', + example: '6871118', + nullable: true, + }, + help_center_id: { + type: 'integer', + description: + 'The id of the help center where the collection will be created. If null then it will be created in the default help center', + example: 123, + nullable: true, + }, + translated_content: { + type: 'object', + description: + 'The translated content of the collection. The keys are the locale codes and the values are the translated content', + additionalProperties: { + type: 'object', + properties: { + name: { + type: 'string', + description: 'The name of the collection in the specified language', + }, + description: { + type: 'string', + description: 'The description of the collection in the specified language', + }, + }, + }, + nullable: true, + }, + }, + required: ['name'], + }, +}; + +/** + * Update an existing collection + */ +const UPDATE_COLLECTION_TOOL: Tool = { + name: 'intercom_update_collection', + description: 'Update an existing Help Center collection.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the collection which is given by Intercom', + example: '123', + }, + name: { + type: 'string', + description: + "The name of the collection. For multilingual collections, this will be the name of the default language's content", + example: 'Updated Collection Name', + }, + description: { + type: 'string', + description: + "The description of the collection. For multilingual collections, this will be the description of the default language's content", + example: 'Updated collection description', + }, + parent_id: { + type: 'string', + description: + 'The id of the parent collection. If null then it will be updated as the first level collection', + example: '6871118', + nullable: true, + }, + translated_content: { + type: 'object', + description: + 'The translated content of the collection. The keys are the locale codes and the values are the translated content', + additionalProperties: { + type: 'object', + properties: { + name: { + type: 'string', + description: 'The name of the collection in the specified language', + }, + description: { + type: 'string', + description: 'The description of the collection in the specified language', + }, + }, + }, + nullable: true, + }, + }, + required: ['id'], + }, +}; + +/** + * Delete a collection + */ +const DELETE_COLLECTION_TOOL: Tool = { + name: 'intercom_delete_collection', + description: 'Delete a single Help Center collection.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the collection which is given by Intercom', + example: '123', + }, + }, + required: ['id'], + }, +}; + +export const ARTICLE_TOOLS = [ + LIST_ARTICLES_TOOL, + GET_ARTICLE_TOOL, + CREATE_ARTICLE_TOOL, + UPDATE_ARTICLE_TOOL, + DELETE_ARTICLE_TOOL, + SEARCH_ARTICLES_TOOL, + LIST_COLLECTIONS_TOOL, + GET_COLLECTION_TOOL, + CREATE_COLLECTION_TOOL, + UPDATE_COLLECTION_TOOL, + DELETE_COLLECTION_TOOL, +] as const; diff --git a/mcp_servers/intercom/src/tools/definitions/companyTools.ts b/mcp_servers/intercom/src/tools/definitions/companyTools.ts new file mode 100644 index 00000000..bdf98273 --- /dev/null +++ b/mcp_servers/intercom/src/tools/definitions/companyTools.ts @@ -0,0 +1,439 @@ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; + +/** + * List all companies + */ +const LIST_COMPANIES_TOOL: Tool = { + name: 'intercom_list_companies', + description: 'List all companies in your Intercom workspace with pagination support.', + inputSchema: { + type: 'object', + properties: { + starting_after: { + type: 'string', + description: 'The cursor to use in pagination for retrieving the next page of results', + }, + per_page: { + type: 'integer', + description: 'Number of results per page (default: 50, max: 150)', + minimum: 1, + maximum: 150, + default: 50, + }, + order: { + type: 'string', + description: 'The order of the results', + enum: ['asc', 'desc'], + default: 'desc', + }, + }, + required: [], + }, +}; + +/** + * Get a specific company by ID + */ +const GET_COMPANY_TOOL: Tool = { + name: 'intercom_get_company', + description: 'Retrieve a specific company by their Intercom ID.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the company which is given by Intercom', + example: '531ee472cce572a6ec000006', + }, + }, + required: ['id'], + }, +}; + +/** + * Create a new company + */ +const CREATE_COMPANY_TOOL: Tool = { + name: 'intercom_create_company', + description: 'Create a new company in Intercom workspace.', + inputSchema: { + type: 'object', + properties: { + name: { + type: 'string', + description: 'The name of the Company', + example: 'Intercom', + }, + company_id: { + type: 'string', + description: "The company id you have defined for the company. Can't be updated", + example: '625e90fc55ab113b6d92175f', + }, + plan: { + type: 'string', + description: 'The name of the plan you have associated with the company', + example: 'Enterprise', + }, + size: { + type: 'integer', + description: 'The number of employees in this company', + example: 100, + }, + website: { + type: 'string', + description: + "The URL for this company's website. Please note that the value specified here is not validated. Accepts any string", + example: 'https://www.example.com/', + }, + industry: { + type: 'string', + description: 'The industry that this company operates in', + example: 'Manufacturing', + }, + remote_created_at: { + type: 'integer', + format: 'date-time', + description: 'The time the company was created by you', + example: 1394531169, + }, + monthly_spend: { + type: 'integer', + description: + 'How much revenue the company generates for your business. Note that this will truncate floats. i.e. it only allow for whole integers, 155.98 will be truncated to 155. Note that this has an upper limit of 2**31-1 or 2147483647', + example: 1000, + }, + custom_attributes: { + type: 'object', + description: + 'A hash of key/value pairs containing any other data about the company you want Intercom to store', + additionalProperties: true, + example: { + paid_subscriber: true, + monthly_spend: 155.5, + team_mates: 9, + }, + }, + }, + anyOf: [ + { + required: ['name'], + title: 'Create company with name', + }, + { + required: ['company_id'], + title: 'Create company with company_id', + }, + ], + }, +}; + +/** + * Update an existing company + */ +const UPDATE_COMPANY_TOOL: Tool = { + name: 'intercom_update_company', + description: 'Update an existing company in Intercom workspace.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the company which is given by Intercom', + example: '531ee472cce572a6ec000006', + }, + name: { + type: 'string', + description: 'The name of the Company', + example: 'Intercom', + }, + plan: { + type: 'string', + description: 'The name of the plan you have associated with the company', + example: 'Enterprise', + }, + size: { + type: 'integer', + description: 'The number of employees in this company', + example: 100, + }, + website: { + type: 'string', + description: + "The URL for this company's website. Please note that the value specified here is not validated. Accepts any string", + example: 'https://www.example.com/', + }, + industry: { + type: 'string', + description: 'The industry that this company operates in', + example: 'Manufacturing', + }, + remote_created_at: { + type: 'integer', + format: 'date-time', + description: 'The time the company was created by you', + example: 1394531169, + }, + monthly_spend: { + type: 'integer', + description: + 'How much revenue the company generates for your business. Note that this will truncate floats. i.e. it only allow for whole integers, 155.98 will be truncated to 155. Note that this has an upper limit of 2**31-1 or 2147483647', + example: 1000, + }, + custom_attributes: { + type: 'object', + description: + 'A hash of key/value pairs containing any other data about the company you want Intercom to store', + additionalProperties: true, + example: { + paid_subscriber: true, + monthly_spend: 155.5, + team_mates: 9, + }, + }, + }, + required: ['id'], + }, +}; + +/** + * Delete a company + */ +const DELETE_COMPANY_TOOL: Tool = { + name: 'intercom_delete_company', + description: 'Delete a single company from Intercom workspace.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the company which is given by Intercom', + example: '531ee472cce572a6ec000006', + }, + }, + required: ['id'], + }, +}; + +/** + * Find company by company_id + */ +const FIND_COMPANY_TOOL: Tool = { + name: 'intercom_find_company', + description: 'Find a company using your own company_id (external identifier).', + inputSchema: { + type: 'object', + properties: { + company_id: { + type: 'string', + description: 'The company_id you have defined for the company', + example: '625e90fc55ab113b6d92175f', + }, + }, + required: ['company_id'], + }, +}; + +/** + * List company users + */ +const LIST_COMPANY_USERS_TOOL: Tool = { + name: 'intercom_list_company_users', + description: 'List all users that belong to a specific company.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the company which is given by Intercom', + example: '531ee472cce572a6ec000006', + }, + }, + required: ['id'], + }, +}; + +/** + * Attach a contact to a company + */ +const ATTACH_CONTACT_TO_COMPANY_TOOL: Tool = { + name: 'intercom_attach_contact_to_company', + description: 'Attach a contact to a company.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the company which is given by Intercom', + example: '531ee472cce572a6ec000006', + }, + contact_id: { + type: 'string', + description: 'The unique identifier for the contact which is given by Intercom', + example: '63a07ddf05a32042dffac965', + }, + }, + required: ['id', 'contact_id'], + }, +}; + +/** + * Detach a contact from a company + */ +const DETACH_CONTACT_FROM_COMPANY_TOOL: Tool = { + name: 'intercom_detach_contact_from_company', + description: 'Detach a contact from a company.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the company which is given by Intercom', + example: '531ee472cce572a6ec000006', + }, + contact_id: { + type: 'string', + description: 'The unique identifier for the contact which is given by Intercom', + example: '63a07ddf05a32042dffac965', + }, + }, + required: ['id', 'contact_id'], + }, +}; + +/** + * List company segments + */ +const LIST_COMPANY_SEGMENTS_TOOL: Tool = { + name: 'intercom_list_company_segments', + description: 'List all segments that a specific company belongs to.', + inputSchema: { + type: 'object', + properties: { + company_id: { + type: 'string', + description: 'The unique identifier for the company which is given by Intercom', + example: '531ee472cce572a6ec000006', + }, + }, + required: ['company_id'], + }, +}; + +/** + * List company tags + */ +const LIST_COMPANY_TAGS_TOOL: Tool = { + name: 'intercom_list_company_tags', + description: 'List all tags attached to a specific company.', + inputSchema: { + type: 'object', + properties: { + company_id: { + type: 'string', + description: 'The unique identifier for the company which is given by Intercom', + example: '531ee472cce572a6ec000006', + }, + }, + required: ['company_id'], + }, +}; + +/** + * Tag a company + */ +const TAG_COMPANY_TOOL: Tool = { + name: 'intercom_tag_company', + description: 'Add a tag to a specific company.', + inputSchema: { + type: 'object', + properties: { + name: { + type: 'string', + description: 'The name of the tag, which will be created if not found', + example: 'Enterprise', + }, + companies: { + type: 'array', + description: 'Array of companies to tag', + items: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The Intercom defined id representing the company', + example: '531ee472cce572a6ec000006', + }, + company_id: { + type: 'string', + description: 'The company id you have defined for the company', + example: '6', + }, + }, + oneOf: [{ required: ['id'] }, { required: ['company_id'] }], + }, + }, + }, + required: ['name', 'companies'], + }, +}; + +/** + * Untag a company + */ +const UNTAG_COMPANY_TOOL: Tool = { + name: 'intercom_untag_company', + description: 'Remove a tag from a specific company.', + inputSchema: { + type: 'object', + properties: { + name: { + type: 'string', + description: 'The name of the tag which will be untagged from the company', + example: 'Enterprise', + }, + companies: { + type: 'array', + description: 'Array of companies to untag', + items: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The Intercom defined id representing the company', + example: '531ee472cce572a6ec000006', + }, + company_id: { + type: 'string', + description: 'The company id you have defined for the company', + example: '6', + }, + }, + oneOf: [{ required: ['id'] }, { required: ['company_id'] }], + }, + }, + untag: { + type: 'boolean', + description: 'Always set to true', + enum: [true], + default: true, + }, + }, + required: ['name', 'companies', 'untag'], + }, +}; + +export const COMPANY_TOOLS = [ + LIST_COMPANIES_TOOL, + GET_COMPANY_TOOL, + CREATE_COMPANY_TOOL, + UPDATE_COMPANY_TOOL, + DELETE_COMPANY_TOOL, + FIND_COMPANY_TOOL, + LIST_COMPANY_USERS_TOOL, + ATTACH_CONTACT_TO_COMPANY_TOOL, + DETACH_CONTACT_FROM_COMPANY_TOOL, + LIST_COMPANY_SEGMENTS_TOOL, + LIST_COMPANY_TAGS_TOOL, + TAG_COMPANY_TOOL, + UNTAG_COMPANY_TOOL, +] as const; diff --git a/mcp_servers/intercom/src/tools/definitions/contactTools.ts b/mcp_servers/intercom/src/tools/definitions/contactTools.ts new file mode 100644 index 00000000..4c3aa128 --- /dev/null +++ b/mcp_servers/intercom/src/tools/definitions/contactTools.ts @@ -0,0 +1,506 @@ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; + +/** + * List all contacts in Intercom workspace + */ +const LIST_CONTACTS_TOOL: Tool = { + name: 'intercom_list_contacts', + description: + 'List all contacts (users or leads) in your Intercom workspace with pagination support.', + inputSchema: { + type: 'object', + properties: { + starting_after: { + type: 'string', + description: 'The cursor to use in pagination for retrieving the next page of results', + }, + per_page: { + type: 'integer', + description: 'Number of results per page (default: 50, max: 150)', + minimum: 1, + maximum: 150, + default: 50, + }, + }, + required: [], + }, +}; + +/** + * Get a specific contact by ID + */ +const GET_CONTACT_TOOL: Tool = { + name: 'intercom_get_contact', + description: 'Retrieve a specific contact by their Intercom ID.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the contact which is given by Intercom', + example: '63a07ddf05a32042dffac965', + }, + }, + required: ['id'], + }, +}; + +/** + * Create a new contact + */ +const CREATE_CONTACT_TOOL: Tool = { + name: 'intercom_create_contact', + description: 'Create a new contact in Intercom workspace.', + inputSchema: { + type: 'object', + properties: { + role: { + type: 'string', + description: 'The role of the contact', + enum: ['user', 'lead'], + }, + external_id: { + type: 'string', + description: 'A unique identifier for the contact which is given to Intercom', + }, + email: { + type: 'string', + format: 'email', + description: 'The contacts email', + example: 'jdoe@example.com', + }, + phone: { + type: 'string', + description: 'The contacts phone number', + example: '+353871234567', + nullable: true, + }, + name: { + type: 'string', + description: 'The contacts name', + example: 'John Doe', + nullable: true, + }, + avatar: { + type: 'string', + format: 'uri', + description: 'An image URL containing the avatar of a contact', + example: 'https://www.example.com/avatar_image.jpg', + nullable: true, + }, + signed_up_at: { + type: 'integer', + format: 'date-time', + description: 'The time specified for when a contact signed up (UNIX timestamp)', + example: 1571672154, + nullable: true, + }, + last_seen_at: { + type: 'integer', + format: 'date-time', + description: 'The time when the contact was last seen (UNIX timestamp)', + example: 1571672154, + nullable: true, + }, + owner_id: { + type: 'integer', + description: 'The id of an admin that has been assigned account ownership of the contact', + example: 123, + nullable: true, + }, + unsubscribed_from_emails: { + type: 'boolean', + description: 'Whether the contact is unsubscribed from emails', + example: true, + nullable: true, + }, + custom_attributes: { + type: 'object', + description: 'The custom attributes which are set for the contact', + nullable: true, + additionalProperties: true, + }, + }, + anyOf: [ + { + required: ['email'], + title: 'Create contact with email', + }, + { + required: ['external_id'], + title: 'Create contact with external_id', + }, + { + required: ['role'], + title: 'Create contact with role', + }, + ], + }, +}; + +/** + * Update an existing contact + */ +const UPDATE_CONTACT_TOOL: Tool = { + name: 'intercom_update_contact', + description: 'Update an existing contact in Intercom workspace.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the contact which is given by Intercom', + example: '63a07ddf05a32042dffac965', + }, + role: { + type: 'string', + description: 'The role of the contact', + enum: ['user', 'lead'], + }, + external_id: { + type: 'string', + description: 'A unique identifier for the contact which is given to Intercom', + }, + email: { + type: 'string', + format: 'email', + description: 'The contacts email', + example: 'jdoe@example.com', + }, + phone: { + type: 'string', + description: 'The contacts phone number', + example: '+353871234567', + nullable: true, + }, + name: { + type: 'string', + description: 'The contacts name', + example: 'John Doe', + nullable: true, + }, + avatar: { + type: 'string', + format: 'uri', + description: 'An image URL containing the avatar of a contact', + example: 'https://www.example.com/avatar_image.jpg', + nullable: true, + }, + signed_up_at: { + type: 'integer', + format: 'date-time', + description: 'The time specified for when a contact signed up (UNIX timestamp)', + example: 1571672154, + nullable: true, + }, + last_seen_at: { + type: 'integer', + format: 'date-time', + description: 'The time when the contact was last seen (UNIX timestamp)', + example: 1571672154, + nullable: true, + }, + owner_id: { + type: 'integer', + description: 'The id of an admin that has been assigned account ownership of the contact', + example: 123, + nullable: true, + }, + unsubscribed_from_emails: { + type: 'boolean', + description: 'Whether the contact is unsubscribed from emails', + example: true, + nullable: true, + }, + custom_attributes: { + type: 'object', + description: 'The custom attributes which are set for the contact', + nullable: true, + additionalProperties: true, + }, + }, + required: ['id'], + }, +}; + +/** + * Search contacts with query filters + */ +const SEARCH_CONTACTS_TOOL: Tool = { + name: 'intercom_search_contacts', + description: + 'Search contacts using query filters and operators with advanced search capabilities.', + inputSchema: { + type: 'object', + properties: { + query: { + type: 'object', + description: 'Search query with filters', + anyOf: [ + { + title: 'Single filter search', + type: 'object', + properties: { + field: { + type: 'string', + description: 'The field to search on', + example: 'email', + }, + operator: { + type: 'string', + enum: ['=', '!=', 'IN', 'NIN', '<', '>', '~', '!~', '^', '$'], + description: 'The operator to use for the search', + example: '=', + }, + value: { + oneOf: [ + { type: 'string' }, + { type: 'integer' }, + { type: 'array', items: { oneOf: [{ type: 'string' }, { type: 'integer' }] } }, + ], + description: 'The value to search for', + example: 'user@example.com', + }, + }, + required: ['field', 'operator', 'value'], + }, + { + title: 'Multiple filter search', + type: 'object', + properties: { + operator: { + type: 'string', + enum: ['AND', 'OR'], + description: 'Boolean operator to combine multiple filters', + example: 'AND', + }, + value: { + type: 'array', + description: 'Array of filter objects', + items: { + type: 'object', + properties: { + field: { type: 'string' }, + operator: { + type: 'string', + enum: ['=', '!=', 'IN', 'NIN', '<', '>', '~', '!~', '^', '$'], + }, + value: { + oneOf: [ + { type: 'string' }, + { type: 'integer' }, + { + type: 'array', + items: { oneOf: [{ type: 'string' }, { type: 'integer' }] }, + }, + ], + }, + }, + required: ['field', 'operator', 'value'], + }, + }, + }, + required: ['operator', 'value'], + }, + ], + }, + pagination: { + type: 'object', + description: 'Pagination options', + properties: { + per_page: { + type: 'integer', + description: 'Number of results per page (default: 50, max: 150)', + minimum: 1, + maximum: 150, + default: 50, + }, + starting_after: { + type: 'string', + description: 'Cursor for pagination', + nullable: true, + }, + }, + }, + }, + required: ['query'], + }, +}; + +/** + * Delete a contact + */ +const DELETE_CONTACT_TOOL: Tool = { + name: 'intercom_delete_contact', + description: 'Delete a single contact from Intercom workspace.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the contact which is given by Intercom', + example: '63a07ddf05a32042dffac965', + }, + }, + required: ['id'], + }, +}; + +/** + * Merge a lead and a user + */ +const MERGE_CONTACT_TOOL: Tool = { + name: 'intercom_merge_contact', + description: 'Merge a contact with a role of lead into a contact with a role of user.', + inputSchema: { + type: 'object', + properties: { + from: { + type: 'string', + description: 'The unique identifier for the contact to merge away from (must be a lead)', + example: '5d70dd30de4efd54f42fd526', + }, + into: { + type: 'string', + description: 'The unique identifier for the contact to merge into (must be a user)', + example: '5ba682d23d7cf92bef87bfd4', + }, + }, + required: ['from', 'into'], + }, +}; + +/** + * List notes for a contact + */ +const LIST_CONTACT_NOTES_TOOL: Tool = { + name: 'intercom_list_contact_notes', + description: 'List all notes attached to a specific contact.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the contact which is given by Intercom', + example: '63a07ddf05a32042dffac965', + }, + }, + required: ['id'], + }, +}; + +/** + * Create a note for a contact + */ +const CREATE_CONTACT_NOTE_TOOL: Tool = { + name: 'intercom_create_contact_note', + description: 'Add a note to a specific contact.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the contact which is given by Intercom', + example: '63a07ddf05a32042dffac965', + }, + body: { + type: 'string', + description: 'The text of the note', + example: 'New note content', + }, + contact_id: { + type: 'string', + description: 'The unique identifier of the contact (alternative to id in path)', + example: '123', + }, + admin_id: { + type: 'string', + description: 'The unique identifier of the admin creating the note', + example: '123', + }, + }, + required: ['id', 'body'], + }, +}; + +/** + * List tags attached to a contact + */ +const LIST_CONTACT_TAGS_TOOL: Tool = { + name: 'intercom_list_contact_tags', + description: 'List all tags attached to a specific contact.', + inputSchema: { + type: 'object', + properties: { + contact_id: { + type: 'string', + description: 'The unique identifier for the contact which is given by Intercom', + example: '63a07ddf05a32042dffac965', + }, + }, + required: ['contact_id'], + }, +}; + +/** + * Add tag to a contact + */ +const ADD_CONTACT_TAG_TOOL: Tool = { + name: 'intercom_add_contact_tag', + description: 'Add a tag to a specific contact.', + inputSchema: { + type: 'object', + properties: { + contact_id: { + type: 'string', + description: 'The unique identifier for the contact which is given by Intercom', + example: '63a07ddf05a32042dffac965', + }, + id: { + type: 'string', + description: 'The unique identifier for the tag which is given by Intercom', + example: '7522907', + }, + }, + required: ['contact_id', 'id'], + }, +}; + +/** + * Remove tag from a contact + */ +const REMOVE_CONTACT_TAG_TOOL: Tool = { + name: 'intercom_remove_contact_tag', + description: 'Remove a tag from a specific contact.', + inputSchema: { + type: 'object', + properties: { + contact_id: { + type: 'string', + description: 'The unique identifier for the contact which is given by Intercom', + example: '63a07ddf05a32042dffac965', + }, + id: { + type: 'string', + description: 'The unique identifier for the tag which is given by Intercom', + example: '7522907', + }, + }, + required: ['contact_id', 'id'], + }, +}; + +export const CONTACT_TOOLS = [ + LIST_CONTACTS_TOOL, + GET_CONTACT_TOOL, + CREATE_CONTACT_TOOL, + UPDATE_CONTACT_TOOL, + SEARCH_CONTACTS_TOOL, + DELETE_CONTACT_TOOL, + MERGE_CONTACT_TOOL, + LIST_CONTACT_NOTES_TOOL, + CREATE_CONTACT_NOTE_TOOL, + LIST_CONTACT_TAGS_TOOL, + ADD_CONTACT_TAG_TOOL, + REMOVE_CONTACT_TAG_TOOL, +] as const; diff --git a/mcp_servers/intercom/src/tools/definitions/conversationTools.ts b/mcp_servers/intercom/src/tools/definitions/conversationTools.ts new file mode 100644 index 00000000..a152cf9a --- /dev/null +++ b/mcp_servers/intercom/src/tools/definitions/conversationTools.ts @@ -0,0 +1,525 @@ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; + +/** + * List all conversations + */ +const LIST_CONVERSATIONS_TOOL: Tool = { + name: 'intercom_list_conversations', + description: 'List all conversations in your Intercom workspace with pagination support.', + inputSchema: { + type: 'object', + properties: { + starting_after: { + type: 'string', + description: 'The cursor to use in pagination for retrieving the next page of results', + }, + per_page: { + type: 'integer', + description: 'Number of results per page (default: 20, max: 150)', + minimum: 1, + maximum: 150, + default: 20, + }, + display_as: { + type: 'string', + description: 'Set to plaintext to retrieve conversation messages in plain text', + enum: ['plaintext'], + }, + }, + required: [], + }, +}; + +/** + * Get a specific conversation by ID + */ +const GET_CONVERSATION_TOOL: Tool = { + name: 'intercom_get_conversation', + description: 'Retrieve a specific conversation by its ID with all conversation parts.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'integer', + description: 'The id of the conversation to target', + example: 123, + }, + display_as: { + type: 'string', + description: 'Set to plaintext to retrieve conversation messages in plain text', + enum: ['plaintext'], + }, + }, + required: ['id'], + }, +}; + +/** + * Create a new conversation + */ +const CREATE_CONVERSATION_TOOL: Tool = { + name: 'intercom_create_conversation', + description: + 'Create a conversation that has been initiated by a contact (user or lead). The conversation can be an in-app message only.', + inputSchema: { + type: 'object', + properties: { + from: { + type: 'object', + description: 'The contact who initiated the conversation', + properties: { + type: { + type: 'string', + enum: ['lead', 'user', 'contact'], + description: 'The role associated to the contact - user, lead, or contact', + example: 'user', + }, + id: { + type: 'string', + description: 'The identifier for the contact which is given by Intercom', + format: 'uuid', + minLength: 24, + maxLength: 24, + example: '536e564f316c83104c000020', + }, + }, + required: ['type', 'id'], + }, + body: { + type: 'string', + description: 'The content of the message. HTML is not supported', + example: 'Hello', + }, + created_at: { + type: 'integer', + format: 'date-time', + description: + 'The time the conversation was created as a UTC Unix timestamp. If not provided, the current time will be used', + example: 1671028894, + }, + }, + required: ['from', 'body'], + }, +}; + +/** + * Update a conversation + */ +const UPDATE_CONVERSATION_TOOL: Tool = { + name: 'intercom_update_conversation', + description: 'Update an existing conversation including custom attributes and read status.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'integer', + description: 'The id of the conversation to target', + example: 123, + }, + display_as: { + type: 'string', + description: 'Set to plaintext to retrieve conversation messages in plain text', + enum: ['plaintext'], + }, + read: { + type: 'boolean', + description: 'Mark a conversation as read within Intercom', + example: true, + }, + title: { + type: 'string', + description: 'The title given to the conversation', + example: 'Conversation Title', + }, + custom_attributes: { + type: 'object', + description: 'The custom attributes which are set for the conversation', + additionalProperties: true, + }, + }, + required: ['id'], + }, +}; + +/** + * Delete a conversation + */ +const DELETE_CONVERSATION_TOOL: Tool = { + name: 'intercom_delete_conversation', + description: 'Delete a single conversation from Intercom workspace.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'integer', + description: 'The id of the conversation to delete', + }, + }, + required: ['id'], + }, +}; + +/** + * Search conversations + */ +const SEARCH_CONVERSATIONS_TOOL: Tool = { + name: 'intercom_search_conversations', + description: + 'Search conversations using query filters and operators with advanced search capabilities.', + inputSchema: { + type: 'object', + properties: { + query: { + type: 'object', + description: 'Search query with filters', + anyOf: [ + { + title: 'Single filter search', + type: 'object', + properties: { + field: { + type: 'string', + description: 'The field to search on', + example: 'created_at', + }, + operator: { + type: 'string', + enum: ['=', '!=', 'IN', 'NIN', '<', '>', '~', '!~', '^', '$'], + description: 'The operator to use for the search', + example: '>', + }, + value: { + oneOf: [ + { type: 'string' }, + { type: 'integer' }, + { type: 'array', items: { oneOf: [{ type: 'string' }, { type: 'integer' }] } }, + ], + description: 'The value to search for', + example: '1306054154', + }, + }, + required: ['field', 'operator', 'value'], + }, + { + title: 'Multiple filter search', + type: 'object', + properties: { + operator: { + type: 'string', + enum: ['AND', 'OR'], + description: 'Boolean operator to combine multiple filters', + example: 'AND', + }, + value: { + type: 'array', + description: 'Array of filter objects', + items: { + type: 'object', + properties: { + field: { type: 'string' }, + operator: { + type: 'string', + enum: ['=', '!=', 'IN', 'NIN', '<', '>', '~', '!~', '^', '$'], + }, + value: { + oneOf: [ + { type: 'string' }, + { type: 'integer' }, + { + type: 'array', + items: { oneOf: [{ type: 'string' }, { type: 'integer' }] }, + }, + ], + }, + }, + required: ['field', 'operator', 'value'], + }, + }, + }, + required: ['operator', 'value'], + }, + ], + }, + pagination: { + type: 'object', + description: 'Pagination options', + properties: { + per_page: { + type: 'integer', + description: 'Number of results per page (default: 20, max: 150)', + minimum: 1, + maximum: 150, + default: 20, + }, + starting_after: { + type: 'string', + description: 'Cursor for pagination', + nullable: true, + }, + }, + }, + }, + required: ['query'], + }, +}; + +/** + * Reply to a conversation + */ +const REPLY_CONVERSATION_TOOL: Tool = { + name: 'intercom_reply_conversation', + description: + 'Reply to a conversation with a message from an admin or on behalf of a contact, or with a note for admins.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: + 'The Intercom provisioned identifier for the conversation or the string "last" to reply to the last part of the conversation', + example: '123', + }, + message_type: { + type: 'string', + description: 'The type of message being sent', + enum: ['comment', 'note', 'quick_reply'], + }, + type: { + type: 'string', + description: 'The type of reply - admin or user', + enum: ['admin', 'user'], + }, + admin_id: { + type: 'string', + description: 'The id of the admin who is replying to the conversation', + example: '991266214', + }, + intercom_user_id: { + type: 'string', + description: 'The Intercom user id for user replies', + }, + body: { + type: 'string', + description: 'The content of the reply', + example: 'Thanks for reaching out!', + }, + attachment_urls: { + type: 'array', + description: 'A list of image URLs that will be added as attachments', + items: { + type: 'string', + format: 'uri', + }, + }, + }, + required: ['id'], + }, +}; + +/** + * Manage a conversation (close, snooze, open, assign) + */ +const MANAGE_CONVERSATION_TOOL: Tool = { + name: 'intercom_manage_conversation', + description: 'Perform management actions on a conversation: close, snooze, open, or assign.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'integer', + description: 'The id of the conversation to target', + example: 123, + }, + message_type: { + type: 'string', + description: 'The type of management action to perform', + enum: ['close', 'snoozed', 'open', 'assignment'], + }, + admin_id: { + type: 'string', + description: 'The id of the admin who is performing the action', + example: '5017690', + }, + assignee_id: { + type: 'string', + description: + 'The id of the admin or team to assign the conversation to (for assignment type)', + example: '991266214', + }, + type: { + type: 'string', + description: 'Type of assignee - admin or team', + enum: ['admin', 'team'], + }, + body: { + type: 'string', + description: 'Message body for close action', + example: 'Goodbye :)', + }, + snoozed_until: { + type: 'integer', + format: 'timestamp', + description: 'The time you want the conversation to reopen (for snooze action)', + example: 1673609604, + }, + }, + required: ['id', 'message_type', 'admin_id'], + }, +}; + +/** + * Attach a contact to a conversation + */ +const ATTACH_CONTACT_TO_CONVERSATION_TOOL: Tool = { + name: 'intercom_attach_contact_to_conversation', + description: + 'Add participants who are contacts to a conversation, on behalf of either another contact or an admin.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The identifier for the conversation as given by Intercom', + example: '123', + }, + admin_id: { + type: 'string', + description: 'The id of the admin performing the action', + example: '991266214', + }, + customer: { + type: 'object', + description: 'The contact to add to the conversation', + properties: { + intercom_user_id: { + type: 'string', + description: 'The Intercom user id of the contact to add', + example: '677c55ef6abd011ad17ff541', + }, + }, + required: ['intercom_user_id'], + }, + }, + required: ['id', 'admin_id', 'customer'], + }, +}; + +/** + * Detach a contact from a conversation + */ +const DETACH_CONTACT_FROM_CONVERSATION_TOOL: Tool = { + name: 'intercom_detach_contact_from_conversation', + description: 'Remove a contact from a group conversation.', + inputSchema: { + type: 'object', + properties: { + conversation_id: { + type: 'string', + description: 'The identifier for the conversation as given by Intercom', + example: '123', + }, + contact_id: { + type: 'string', + description: 'The identifier for the contact as given by Intercom', + example: '123', + }, + admin_id: { + type: 'string', + description: 'The id of the admin performing the action', + example: '991266214', + }, + customer: { + type: 'object', + description: 'The contact to remove from the conversation', + properties: { + intercom_user_id: { + type: 'string', + description: 'The Intercom user id of the contact to remove', + example: '677c55ef6abd011ad17ff541', + }, + }, + required: ['intercom_user_id'], + }, + }, + required: ['conversation_id', 'contact_id', 'admin_id', 'customer'], + }, +}; + +/** + * Redact a conversation part + */ +const REDACT_CONVERSATION_TOOL: Tool = { + name: 'intercom_redact_conversation', + description: 'Redact a conversation part or the source message of a conversation.', + inputSchema: { + type: 'object', + properties: { + type: { + type: 'string', + description: 'The type of resource being redacted', + enum: ['conversation_part', 'source'], + example: 'conversation_part', + }, + conversation_id: { + type: 'string', + description: 'The id of the conversation', + example: '19894788788', + }, + conversation_part_id: { + type: 'string', + description: 'The id of the conversation_part (required when type is conversation_part)', + example: '19381789428', + }, + source_id: { + type: 'string', + description: 'The id of the source (required when type is source)', + example: '19894781231', + }, + }, + required: ['type', 'conversation_id'], + }, +}; + +/** + * Convert a conversation to a ticket + */ +const CONVERT_CONVERSATION_TO_TICKET_TOOL: Tool = { + name: 'intercom_convert_conversation_to_ticket', + description: 'Convert a conversation to a ticket.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'integer', + description: 'The id of the conversation to target', + example: 123, + }, + ticket_type_id: { + type: 'string', + description: 'The ID of the type of ticket you want to convert the conversation to', + example: '1234', + }, + attributes: { + type: 'object', + description: 'The attributes set on the ticket', + additionalProperties: true, + }, + }, + required: ['id', 'ticket_type_id'], + }, +}; + +export const CONVERSATION_TOOLS = [ + LIST_CONVERSATIONS_TOOL, + GET_CONVERSATION_TOOL, + CREATE_CONVERSATION_TOOL, + UPDATE_CONVERSATION_TOOL, + DELETE_CONVERSATION_TOOL, + SEARCH_CONVERSATIONS_TOOL, + REPLY_CONVERSATION_TOOL, + MANAGE_CONVERSATION_TOOL, + ATTACH_CONTACT_TO_CONVERSATION_TOOL, + DETACH_CONTACT_FROM_CONVERSATION_TOOL, + REDACT_CONVERSATION_TOOL, + CONVERT_CONVERSATION_TO_TICKET_TOOL, +] as const; diff --git a/mcp_servers/intercom/src/tools/definitions/index.ts b/mcp_servers/intercom/src/tools/definitions/index.ts new file mode 100644 index 00000000..b5539fbf --- /dev/null +++ b/mcp_servers/intercom/src/tools/definitions/index.ts @@ -0,0 +1,95 @@ +// Export all tool definitions +export * from './contactTools.js'; +export * from './conversationTools.js'; +export * from './companyTools.js'; +export * from './articleTools.js'; +export * from './messageTools.js'; +export * from './tagTools.js'; +export * from './teamTools.js'; + +export type IntercomToolName = + // Contact tools + | 'intercom_list_contacts' + | 'intercom_get_contact' + | 'intercom_create_contact' + | 'intercom_update_contact' + | 'intercom_search_contacts' + | 'intercom_delete_contact' + | 'intercom_merge_contact' + | 'intercom_list_contact_notes' + | 'intercom_create_contact_note' + | 'intercom_list_contact_tags' + | 'intercom_add_contact_tag' + | 'intercom_remove_contact_tag' + // Conversation tools + | 'intercom_list_conversations' + | 'intercom_get_conversation' + | 'intercom_create_conversation' + | 'intercom_update_conversation' + | 'intercom_delete_conversation' + | 'intercom_search_conversations' + | 'intercom_reply_conversation' + | 'intercom_manage_conversation' + | 'intercom_attach_contact_to_conversation' + | 'intercom_detach_contact_from_conversation' + | 'intercom_redact_conversation' + | 'intercom_convert_conversation_to_ticket' + // Company tools + | 'intercom_list_companies' + | 'intercom_get_company' + | 'intercom_create_company' + | 'intercom_update_company' + | 'intercom_delete_company' + | 'intercom_find_company' + | 'intercom_list_company_users' + | 'intercom_attach_contact_to_company' + | 'intercom_detach_contact_from_company' + | 'intercom_list_company_segments' + | 'intercom_list_company_tags' + | 'intercom_tag_company' + | 'intercom_untag_company' + // Article tools + | 'intercom_list_articles' + | 'intercom_get_article' + | 'intercom_create_article' + | 'intercom_update_article' + | 'intercom_delete_article' + | 'intercom_search_articles' + | 'intercom_list_collections' + | 'intercom_get_collection' + | 'intercom_create_collection' + | 'intercom_update_collection' + | 'intercom_delete_collection' + // Message tools + | 'intercom_create_message' + | 'intercom_list_messages' + | 'intercom_get_message' + | 'intercom_create_note' + | 'intercom_list_notes' + | 'intercom_get_note' + | 'intercom_send_user_message' + // Tag tools + | 'intercom_list_tags' + | 'intercom_get_tag' + | 'intercom_create_or_update_tag' + | 'intercom_tag_companies' + | 'intercom_untag_companies' + | 'intercom_tag_users' + | 'intercom_delete_tag' + // Team tools + | 'intercom_list_teams' + | 'intercom_get_team' + | 'intercom_list_admins' + | 'intercom_get_admin' + | 'intercom_get_current_admin' + | 'intercom_set_admin_away' + | 'intercom_list_admin_activity_logs'; + +export type AllIntercomTools = + | (typeof import('./contactTools.js').CONTACT_TOOLS)[number] + | (typeof import('./conversationTools.js').CONVERSATION_TOOLS)[number] + | (typeof import('./companyTools.js').COMPANY_TOOLS)[number] + | (typeof import('./articleTools.js').ARTICLE_TOOLS)[number] + | (typeof import('./messageTools.js').MESSAGE_TOOLS)[number] + | (typeof import('./tagTools.js').TAG_TOOLS)[number] + | (typeof import('./teamTools.js').TEAM_TOOLS)[number]; diff --git a/mcp_servers/intercom/src/tools/definitions/messageTools.ts b/mcp_servers/intercom/src/tools/definitions/messageTools.ts new file mode 100644 index 00000000..981cd26e --- /dev/null +++ b/mcp_servers/intercom/src/tools/definitions/messageTools.ts @@ -0,0 +1,284 @@ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; + +/** + * Create a message + */ +const CREATE_MESSAGE_TOOL: Tool = { + name: 'intercom_create_message', + description: + 'Create a message that has been initiated by an admin. The conversation can be an in-app message or an email.', + inputSchema: { + type: 'object', + properties: { + message_type: { + type: 'string', + description: 'The kind of message being created.', + enum: ['in_app', 'email'], + example: 'in_app', + }, + subject: { + type: 'string', + description: 'The title of the email (required if message_type is email).', + example: 'Thanks for everything', + }, + body: { + type: 'string', + description: 'The content of the message. HTML and plaintext are supported.', + example: 'Hello there', + }, + template: { + type: 'string', + description: 'The style of the outgoing message. Possible values: plain or personal.', + enum: ['plain', 'personal'], + example: 'plain', + }, + from: { + type: 'object', + description: 'The sender of the message. If not provided, the default sender will be used.', + properties: { + type: { + type: 'string', + description: 'Always admin.', + enum: ['admin'], + example: 'admin', + }, + id: { + type: 'integer', + description: 'The identifier for the admin which is given by Intercom.', + example: 394051, + }, + }, + required: ['type', 'id'], + }, + to: { + type: 'object', + description: 'The recipient of the message.', + properties: { + type: { + type: 'string', + description: 'The role associated to the contact - user or lead.', + enum: ['user', 'lead'], + example: 'user', + }, + id: { + type: 'string', + description: 'The identifier for the contact which is given by Intercom.', + example: '536e564f316c83104c000020', + }, + }, + required: ['type', 'id'], + }, + created_at: { + type: 'integer', + format: 'date-time', + description: + 'The time the message was created. If not provided, the current time will be used.', + example: 1590000000, + }, + create_conversation_without_contact_reply: { + type: 'boolean', + description: + 'Whether a conversation should be opened in the inbox for the message without the contact replying. Defaults to false if not provided.', + default: false, + example: true, + }, + }, + anyOf: [ + { + title: 'Email message', + required: ['message_type', 'subject', 'body', 'template', 'from', 'to'], + properties: { + message_type: { enum: ['email'] }, + }, + }, + { + title: 'In-app message', + required: ['message_type', 'body', 'from', 'to'], + properties: { + message_type: { enum: ['in_app'] }, + }, + }, + ], + }, +}; + +/** + * List messages + */ +const LIST_MESSAGES_TOOL: Tool = { + name: 'intercom_list_messages', + description: 'List all messages sent from your workspace with pagination support.', + inputSchema: { + type: 'object', + properties: { + starting_after: { + type: 'string', + description: 'The cursor to use in pagination for retrieving the next page of results', + }, + per_page: { + type: 'integer', + description: 'Number of results per page (default: 50, max: 150)', + minimum: 1, + maximum: 150, + default: 50, + }, + }, + required: [], + }, +}; + +/** + * Get a message by ID + */ +const GET_MESSAGE_TOOL: Tool = { + name: 'intercom_get_message', + description: 'Retrieve a specific message by its ID.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the message which is given by Intercom', + example: '2001', + }, + }, + required: ['id'], + }, +}; + +/** + * Create a note + */ +const CREATE_NOTE_TOOL: Tool = { + name: 'intercom_create_note', + description: 'Add a note to a specific contact.', + inputSchema: { + type: 'object', + properties: { + body: { + type: 'string', + description: 'The text of the note', + example: 'Customer called about pricing questions', + }, + contact_id: { + type: 'string', + description: + 'The unique identifier of the contact (lead or user) which is given by Intercom', + example: '677c55ef6abd011ad17ff541', + }, + admin_id: { + type: 'string', + description: 'The unique identifier of the admin creating the note', + example: '991266214', + }, + }, + required: ['body', 'contact_id', 'admin_id'], + }, +}; + +/** + * List notes for a contact + */ +const LIST_NOTES_TOOL: Tool = { + name: 'intercom_list_notes', + description: 'List all notes attached to a specific contact.', + inputSchema: { + type: 'object', + properties: { + contact_id: { + type: 'string', + description: 'The unique identifier for the contact which is given by Intercom', + example: '677c55ef6abd011ad17ff541', + }, + starting_after: { + type: 'string', + description: 'The cursor to use in pagination for retrieving the next page of results', + }, + per_page: { + type: 'integer', + description: 'Number of results per page (default: 50, max: 150)', + minimum: 1, + maximum: 150, + default: 50, + }, + }, + required: ['contact_id'], + }, +}; + +/** + * Get a note by ID + */ +const GET_NOTE_TOOL: Tool = { + name: 'intercom_get_note', + description: 'Retrieve a specific note by its ID.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the note which is given by Intercom', + example: '17495962', + }, + }, + required: ['id'], + }, +}; + +/** + * Send a user-initiated message + */ +const SEND_USER_MESSAGE_TOOL: Tool = { + name: 'intercom_send_user_message', + description: + 'Create a conversation that has been initiated by a contact (user or lead). The conversation can be an in-app message only.', + inputSchema: { + type: 'object', + properties: { + from: { + type: 'object', + description: 'The contact who initiated the conversation', + properties: { + type: { + type: 'string', + enum: ['lead', 'user', 'contact'], + description: 'The role associated to the contact - user, lead, or contact', + example: 'user', + }, + id: { + type: 'string', + description: 'The identifier for the contact which is given by Intercom', + format: 'uuid', + minLength: 24, + maxLength: 24, + example: '536e564f316c83104c000020', + }, + }, + required: ['type', 'id'], + }, + body: { + type: 'string', + description: 'The content of the message. HTML is not supported', + example: 'Hello, I need help with my order', + }, + created_at: { + type: 'integer', + format: 'date-time', + description: + 'The time the conversation was created as a UTC Unix timestamp. If not provided, the current time will be used', + example: 1671028894, + }, + }, + required: ['from', 'body'], + }, +}; + +export const MESSAGE_TOOLS = [ + CREATE_MESSAGE_TOOL, + LIST_MESSAGES_TOOL, + GET_MESSAGE_TOOL, + CREATE_NOTE_TOOL, + LIST_NOTES_TOOL, + GET_NOTE_TOOL, + SEND_USER_MESSAGE_TOOL, +] as const; diff --git a/mcp_servers/intercom/src/tools/definitions/tagTools.ts b/mcp_servers/intercom/src/tools/definitions/tagTools.ts new file mode 100644 index 00000000..d20a07a6 --- /dev/null +++ b/mcp_servers/intercom/src/tools/definitions/tagTools.ts @@ -0,0 +1,208 @@ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; + +/** + * List all tags + */ +const LIST_TAGS_TOOL: Tool = { + name: 'intercom_list_tags', + description: 'List all tags in your Intercom workspace.', + inputSchema: { + type: 'object', + properties: {}, + required: [], + }, +}; + +/** + * Find a specific tag + */ +const GET_TAG_TOOL: Tool = { + name: 'intercom_get_tag', + description: 'Retrieve a specific tag by its ID.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier of a given tag', + example: '123', + }, + }, + required: ['id'], + }, +}; + +/** + * Create or update a tag + */ +const CREATE_OR_UPDATE_TAG_TOOL: Tool = { + name: 'intercom_create_or_update_tag', + description: 'Create a new tag or update an existing tag by name or ID.', + inputSchema: { + type: 'object', + properties: { + name: { + type: 'string', + description: + 'The name of the tag, which will be created if not found, or the new name for the tag if this is an update request. Names are case insensitive.', + example: 'Independent', + }, + id: { + type: 'string', + description: 'The id of tag to update (optional, only for updates)', + example: '656452352', + }, + }, + required: ['name'], + }, +}; + +/** + * Tag companies + */ +const TAG_COMPANIES_TOOL: Tool = { + name: 'intercom_tag_companies', + description: + "Tag a single company or a list of companies. If the tag doesn't exist, a new one will be created automatically.", + inputSchema: { + type: 'object', + properties: { + name: { + type: 'string', + description: 'The name of the tag, which will be created if not found', + example: 'Enterprise', + }, + companies: { + type: 'array', + description: 'Array of companies to tag', + items: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The Intercom defined id representing the company', + example: '531ee472cce572a6ec000006', + }, + company_id: { + type: 'string', + description: 'The company id you have defined for the company', + example: '6', + }, + }, + oneOf: [{ required: ['id'] }, { required: ['company_id'] }], + }, + }, + }, + required: ['name', 'companies'], + }, +}; + +/** + * Untag companies + */ +const UNTAG_COMPANIES_TOOL: Tool = { + name: 'intercom_untag_companies', + description: 'Remove a tag from a single company or a list of companies.', + inputSchema: { + type: 'object', + properties: { + name: { + type: 'string', + description: 'The name of the tag which will be removed from the companies', + example: 'Enterprise', + }, + companies: { + type: 'array', + description: 'Array of companies to untag', + items: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The Intercom defined id representing the company', + example: '531ee472cce572a6ec000006', + }, + company_id: { + type: 'string', + description: 'The company id you have defined for the company', + example: '6', + }, + }, + oneOf: [{ required: ['id'] }, { required: ['company_id'] }], + }, + }, + untag: { + type: 'boolean', + description: 'Always set to true for untag operations', + enum: [true], + default: true, + }, + }, + required: ['name', 'companies', 'untag'], + }, +}; + +/** + * Tag multiple users + */ +const TAG_USERS_TOOL: Tool = { + name: 'intercom_tag_users', + description: + "Tag a list of users/contacts. If the tag doesn't exist, a new one will be created automatically.", + inputSchema: { + type: 'object', + properties: { + name: { + type: 'string', + description: 'The name of the tag, which will be created if not found', + example: 'VIP Customer', + }, + users: { + type: 'array', + description: 'Array of users to tag', + items: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The Intercom defined id representing the user', + example: '5f7f0d217289f8d2f4262080', + }, + }, + required: ['id'], + }, + }, + }, + required: ['name', 'users'], + }, +}; + +/** + * Delete a tag + */ +const DELETE_TAG_TOOL: Tool = { + name: 'intercom_delete_tag', + description: + 'Delete a tag from your workspace. Note: tags with dependent objects (like segments) cannot be deleted.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier of a given tag', + example: '123', + }, + }, + required: ['id'], + }, +}; + +export const TAG_TOOLS = [ + LIST_TAGS_TOOL, + GET_TAG_TOOL, + CREATE_OR_UPDATE_TAG_TOOL, + TAG_COMPANIES_TOOL, + UNTAG_COMPANIES_TOOL, + TAG_USERS_TOOL, + DELETE_TAG_TOOL, +] as const; diff --git a/mcp_servers/intercom/src/tools/definitions/teamTools.ts b/mcp_servers/intercom/src/tools/definitions/teamTools.ts new file mode 100644 index 00000000..670d6cc1 --- /dev/null +++ b/mcp_servers/intercom/src/tools/definitions/teamTools.ts @@ -0,0 +1,147 @@ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; + +/** + * List all teams + */ +const LIST_TEAMS_TOOL: Tool = { + name: 'intercom_list_teams', + description: 'List all teams in your Intercom workspace.', + inputSchema: { + type: 'object', + properties: {}, + required: [], + }, +}; + +/** + * Get a specific team by ID + */ +const GET_TEAM_TOOL: Tool = { + name: 'intercom_get_team', + description: + 'Retrieve a specific team by its ID, containing an array of admins that belong to this team.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier of a given team', + example: '123', + }, + }, + required: ['id'], + }, +}; + +/** + * List all admins + */ +const LIST_ADMINS_TOOL: Tool = { + name: 'intercom_list_admins', + description: 'List all admins (teammates) in your Intercom workspace.', + inputSchema: { + type: 'object', + properties: {}, + required: [], + }, +}; + +/** + * Get a specific admin by ID + */ +const GET_ADMIN_TOOL: Tool = { + name: 'intercom_get_admin', + description: 'Retrieve the details of a single admin (teammate).', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'integer', + description: 'The unique identifier of a given admin', + example: 123, + }, + }, + required: ['id'], + }, +}; + +/** + * Get current admin (me) + */ +const GET_CURRENT_ADMIN_TOOL: Tool = { + name: 'intercom_get_current_admin', + description: + 'Identify the currently authorized admin along with the embedded app object (workspace).', + inputSchema: { + type: 'object', + properties: {}, + required: [], + }, +}; + +/** + * Set admin away status + */ +const SET_ADMIN_AWAY_TOOL: Tool = { + name: 'intercom_set_admin_away', + description: 'Set an admin as away for the Inbox, with options for reassigning conversations.', + inputSchema: { + type: 'object', + properties: { + id: { + type: 'integer', + description: 'The unique identifier of a given admin', + example: 123, + }, + away_mode_enabled: { + type: 'boolean', + description: 'Set to true to change the status of the admin to away', + example: true, + default: true, + }, + away_mode_reassign: { + type: 'boolean', + description: 'Set to true to assign any new conversation replies to your default inbox', + example: false, + default: false, + }, + }, + required: ['id', 'away_mode_enabled', 'away_mode_reassign'], + }, +}; + +/** + * List admin activity logs + */ +const LIST_ADMIN_ACTIVITY_LOGS_TOOL: Tool = { + name: 'intercom_list_admin_activity_logs', + description: 'Get a log of activities by all admins in an app within a specified timeframe.', + inputSchema: { + type: 'object', + properties: { + created_at_after: { + type: 'string', + description: + 'The start date that you request data for. It must be formatted as a UNIX timestamp.', + example: '1677253093', + }, + created_at_before: { + type: 'string', + description: + 'The end date that you request data for. It must be formatted as a UNIX timestamp.', + example: '1677861493', + }, + }, + required: ['created_at_after'], + }, +}; + +export const TEAM_TOOLS = [ + LIST_TEAMS_TOOL, + GET_TEAM_TOOL, + LIST_ADMINS_TOOL, + GET_ADMIN_TOOL, + GET_CURRENT_ADMIN_TOOL, + SET_ADMIN_AWAY_TOOL, + LIST_ADMIN_ACTIVITY_LOGS_TOOL, +] as const; diff --git a/mcp_servers/intercom/src/tools/handlers/articleHandler.ts b/mcp_servers/intercom/src/tools/handlers/articleHandler.ts new file mode 100644 index 00000000..0dcecd1b --- /dev/null +++ b/mcp_servers/intercom/src/tools/handlers/articleHandler.ts @@ -0,0 +1,307 @@ +import { IntercomClient } from '../../client/intercomClient.js'; +import { validateArticleId, validateRequiredFields } from '../../utils/validation.js'; + +export class ArticleHandler { + constructor(private intercomClient: IntercomClient) {} + + async listArticles(data: { startingAfter?: string; perPage?: number }): Promise { + const params = new URLSearchParams(); + + if (data.startingAfter) { + params.append('starting_after', data.startingAfter); + } + + if (data.perPage) { + params.append('per_page', data.perPage.toString()); + } + + const queryString = params.toString(); + const endpoint = queryString ? `/articles?${queryString}` : '/articles'; + + return this.intercomClient.makeRequest(endpoint, { + method: 'GET', + }); + } + + async getArticle(articleId: number): Promise { + if (!validateArticleId(articleId.toString())) { + throw new Error('Invalid article ID provided'); + } + + return this.intercomClient.makeRequest(`/articles/${articleId}`, { + method: 'GET', + }); + } + + async createArticle(data: { + title: string; + description?: string; + body?: string; + authorId: number; + state?: 'published' | 'draft'; + parentId?: number; + parentType?: 'collection' | 'section'; + translatedContent?: Record< + string, + { + title?: string; + description?: string; + body?: string; + authorId?: number; + state?: 'published' | 'draft'; + } + >; + }): Promise { + const validation = validateRequiredFields(data, ['title', 'authorId']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + const payload: any = { + title: data.title, + author_id: data.authorId, + }; + + if (data.description !== undefined) payload.description = data.description; + if (data.body !== undefined) payload.body = data.body; + if (data.state !== undefined) payload.state = data.state; + if (data.parentId !== undefined) payload.parent_id = data.parentId; + if (data.parentType !== undefined) payload.parent_type = data.parentType; + + if (data.translatedContent !== undefined) { + payload.translated_content = {}; + for (const [locale, content] of Object.entries(data.translatedContent)) { + payload.translated_content[locale] = {}; + if (content.title !== undefined) payload.translated_content[locale].title = content.title; + if (content.description !== undefined) + payload.translated_content[locale].description = content.description; + if (content.body !== undefined) payload.translated_content[locale].body = content.body; + if (content.authorId !== undefined) + payload.translated_content[locale].author_id = content.authorId; + if (content.state !== undefined) payload.translated_content[locale].state = content.state; + } + } + + return this.intercomClient.makeRequest('/articles', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async updateArticle( + articleId: number, + data: { + title?: string; + description?: string; + body?: string; + authorId?: number; + state?: 'published' | 'draft'; + parentId?: string; + parentType?: 'collection' | 'section'; + translatedContent?: Record< + string, + { + title?: string; + description?: string; + body?: string; + authorId?: number; + state?: 'published' | 'draft'; + } + >; + }, + ): Promise { + if (!validateArticleId(articleId.toString())) { + throw new Error('Invalid article ID provided'); + } + + const payload: any = {}; + + if (data.title !== undefined) payload.title = data.title; + if (data.description !== undefined) payload.description = data.description; + if (data.body !== undefined) payload.body = data.body; + if (data.authorId !== undefined) payload.author_id = data.authorId; + if (data.state !== undefined) payload.state = data.state; + if (data.parentId !== undefined) payload.parent_id = data.parentId; + if (data.parentType !== undefined) payload.parent_type = data.parentType; + + if (data.translatedContent !== undefined) { + payload.translated_content = {}; + for (const [locale, content] of Object.entries(data.translatedContent)) { + payload.translated_content[locale] = {}; + if (content.title !== undefined) payload.translated_content[locale].title = content.title; + if (content.description !== undefined) + payload.translated_content[locale].description = content.description; + if (content.body !== undefined) payload.translated_content[locale].body = content.body; + if (content.authorId !== undefined) + payload.translated_content[locale].author_id = content.authorId; + if (content.state !== undefined) payload.translated_content[locale].state = content.state; + } + } + + return this.intercomClient.makeRequest(`/articles/${articleId}`, { + method: 'PUT', + body: JSON.stringify(payload), + }); + } + + async deleteArticle(articleId: number): Promise { + if (!validateArticleId(articleId.toString())) { + throw new Error('Invalid article ID provided'); + } + + return this.intercomClient.makeRequest(`/articles/${articleId}`, { + method: 'DELETE', + }); + } + + async searchArticles(data: { + phrase?: string; + state?: 'published' | 'draft'; + authorId?: number; + parentId?: number; + parentType?: 'collection' | 'section'; + startingAfter?: string; + perPage?: number; + }): Promise { + const params = new URLSearchParams(); + + if (data.phrase) params.append('phrase', data.phrase); + if (data.state) params.append('state', data.state); + if (data.authorId) params.append('author_id', data.authorId.toString()); + if (data.parentId) params.append('parent_id', data.parentId.toString()); + if (data.parentType) params.append('parent_type', data.parentType); + if (data.startingAfter) params.append('starting_after', data.startingAfter); + if (data.perPage) params.append('per_page', data.perPage.toString()); + + const queryString = params.toString(); + const endpoint = queryString ? `/articles/search?${queryString}` : '/articles/search'; + + return this.intercomClient.makeRequest(endpoint, { + method: 'GET', + }); + } + + async listCollections(data: { startingAfter?: string; perPage?: number }): Promise { + const params = new URLSearchParams(); + + if (data.startingAfter) { + params.append('starting_after', data.startingAfter); + } + + if (data.perPage) { + params.append('per_page', data.perPage.toString()); + } + + const queryString = params.toString(); + const endpoint = queryString + ? `/help_center/collections?${queryString}` + : '/help_center/collections'; + + return this.intercomClient.makeRequest(endpoint, { + method: 'GET', + }); + } + + async getCollection(collectionId: string): Promise { + if (!collectionId || typeof collectionId !== 'string') { + throw new Error('Invalid collection ID provided'); + } + + return this.intercomClient.makeRequest(`/help_center/collections/${collectionId}`, { + method: 'GET', + }); + } + + async createCollection(data: { + name: string; + description?: string; + parentId?: string; + helpCenterId?: number; + translatedContent?: Record< + string, + { + name?: string; + description?: string; + } + >; + }): Promise { + const validation = validateRequiredFields(data, ['name']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + const payload: any = { + name: data.name, + }; + + if (data.description !== undefined) payload.description = data.description; + if (data.parentId !== undefined) payload.parent_id = data.parentId; + if (data.helpCenterId !== undefined) payload.help_center_id = data.helpCenterId; + + if (data.translatedContent !== undefined) { + payload.translated_content = {}; + for (const [locale, content] of Object.entries(data.translatedContent)) { + payload.translated_content[locale] = {}; + if (content.name !== undefined) payload.translated_content[locale].name = content.name; + if (content.description !== undefined) + payload.translated_content[locale].description = content.description; + } + } + + return this.intercomClient.makeRequest('/help_center/collections', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async updateCollection( + collectionId: string, + data: { + name?: string; + description?: string; + parentId?: string; + translatedContent?: Record< + string, + { + name?: string; + description?: string; + } + >; + }, + ): Promise { + if (!collectionId || typeof collectionId !== 'string') { + throw new Error('Invalid collection ID provided'); + } + + const payload: any = {}; + + if (data.name !== undefined) payload.name = data.name; + if (data.description !== undefined) payload.description = data.description; + if (data.parentId !== undefined) payload.parent_id = data.parentId; + + if (data.translatedContent !== undefined) { + payload.translated_content = {}; + for (const [locale, content] of Object.entries(data.translatedContent)) { + payload.translated_content[locale] = {}; + if (content.name !== undefined) payload.translated_content[locale].name = content.name; + if (content.description !== undefined) + payload.translated_content[locale].description = content.description; + } + } + + return this.intercomClient.makeRequest(`/help_center/collections/${collectionId}`, { + method: 'PUT', + body: JSON.stringify(payload), + }); + } + + async deleteCollection(collectionId: string): Promise { + if (!collectionId || typeof collectionId !== 'string') { + throw new Error('Invalid collection ID provided'); + } + + return this.intercomClient.makeRequest(`/help_center/collections/${collectionId}`, { + method: 'DELETE', + }); + } +} diff --git a/mcp_servers/intercom/src/tools/handlers/companyHandler.ts b/mcp_servers/intercom/src/tools/handlers/companyHandler.ts new file mode 100644 index 00000000..b1d25f80 --- /dev/null +++ b/mcp_servers/intercom/src/tools/handlers/companyHandler.ts @@ -0,0 +1,302 @@ +import { IntercomClient } from '../../client/intercomClient.js'; +import { + validateCompanyId, + validateContactId, + validateRequiredFields, + validateUrl, +} from '../../utils/validation.js'; + +export class CompanyHandler { + constructor(private intercomClient: IntercomClient) {} + + async listCompanies(data: { + startingAfter?: string; + perPage?: number; + order?: 'asc' | 'desc'; + }): Promise { + const params = new URLSearchParams(); + + if (data.startingAfter) { + params.append('starting_after', data.startingAfter); + } + + if (data.perPage) { + params.append('per_page', data.perPage.toString()); + } + + if (data.order) { + params.append('order', data.order); + } + + const queryString = params.toString(); + const endpoint = queryString ? `/companies?${queryString}` : '/companies'; + + return this.intercomClient.makeRequest(endpoint, { + method: 'GET', + }); + } + + async getCompany(companyId: string): Promise { + if (!validateCompanyId(companyId)) { + throw new Error('Invalid company ID provided'); + } + + return this.intercomClient.makeRequest(`/companies/${companyId}`, { + method: 'GET', + }); + } + + async createCompany(data: { + name?: string; + companyId?: string; + plan?: string; + size?: number; + website?: string; + industry?: string; + remoteCreatedAt?: number; + monthlySpend?: number; + customAttributes?: Record; + }): Promise { + // At least one of name or companyId is required + if (!data.name && !data.companyId) { + throw new Error('At least one of name or company_id must be provided'); + } + + const payload: any = {}; + + if (data.name !== undefined) payload.name = data.name; + if (data.companyId !== undefined) payload.company_id = data.companyId; + if (data.plan !== undefined) payload.plan = data.plan; + if (data.size !== undefined) payload.size = data.size; + if (data.website !== undefined) { + if (data.website && !validateUrl(data.website)) { + throw new Error('Invalid website URL format provided'); + } + payload.website = data.website; + } + if (data.industry !== undefined) payload.industry = data.industry; + if (data.remoteCreatedAt !== undefined) payload.remote_created_at = data.remoteCreatedAt; + if (data.monthlySpend !== undefined) { + // Validate monthly spend is within integer limits + if (data.monthlySpend > 2147483647) { + throw new Error('Monthly spend exceeds maximum allowed value (2147483647)'); + } + payload.monthly_spend = Math.floor(data.monthlySpend); // Truncate to integer as per API spec + } + if (data.customAttributes !== undefined) payload.custom_attributes = data.customAttributes; + + return this.intercomClient.makeRequest('/companies', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async updateCompany( + companyId: string, + data: { + name?: string; + plan?: string; + size?: number; + website?: string; + industry?: string; + remoteCreatedAt?: number; + monthlySpend?: number; + customAttributes?: Record; + }, + ): Promise { + if (!validateCompanyId(companyId)) { + throw new Error('Invalid company ID provided'); + } + + const payload: any = {}; + + if (data.name !== undefined) payload.name = data.name; + if (data.plan !== undefined) payload.plan = data.plan; + if (data.size !== undefined) payload.size = data.size; + if (data.website !== undefined) { + if (data.website && !validateUrl(data.website)) { + throw new Error('Invalid website URL format provided'); + } + payload.website = data.website; + } + if (data.industry !== undefined) payload.industry = data.industry; + if (data.remoteCreatedAt !== undefined) payload.remote_created_at = data.remoteCreatedAt; + if (data.monthlySpend !== undefined) { + if (data.monthlySpend > 2147483647) { + throw new Error('Monthly spend exceeds maximum allowed value (2147483647)'); + } + payload.monthly_spend = Math.floor(data.monthlySpend); + } + if (data.customAttributes !== undefined) payload.custom_attributes = data.customAttributes; + + return this.intercomClient.makeRequest(`/companies/${companyId}`, { + method: 'PUT', + body: JSON.stringify(payload), + }); + } + + async deleteCompany(companyId: string): Promise { + if (!validateCompanyId(companyId)) { + throw new Error('Invalid company ID provided'); + } + + return this.intercomClient.makeRequest(`/companies/${companyId}`, { + method: 'DELETE', + }); + } + + async findCompany(externalCompanyId: string): Promise { + if (!externalCompanyId || typeof externalCompanyId !== 'string') { + throw new Error('Valid external company ID must be provided'); + } + + const params = new URLSearchParams(); + params.append('company_id', externalCompanyId); + + return this.intercomClient.makeRequest(`/companies?${params.toString()}`, { + method: 'GET', + }); + } + + async listCompanyUsers(companyId: string): Promise { + if (!validateCompanyId(companyId)) { + throw new Error('Invalid company ID provided'); + } + + return this.intercomClient.makeRequest(`/companies/${companyId}/users`, { + method: 'GET', + }); + } + + async attachContactToCompany(companyId: string, contactId: string): Promise { + if (!validateCompanyId(companyId)) { + throw new Error('Invalid company ID provided'); + } + + if (!validateContactId(contactId)) { + throw new Error('Invalid contact ID provided'); + } + + const payload = { + id: contactId, + }; + + return this.intercomClient.makeRequest(`/companies/${companyId}/contacts`, { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async detachContactFromCompany(companyId: string, contactId: string): Promise { + if (!validateCompanyId(companyId)) { + throw new Error('Invalid company ID provided'); + } + + if (!validateContactId(contactId)) { + throw new Error('Invalid contact ID provided'); + } + + return this.intercomClient.makeRequest(`/companies/${companyId}/contacts/${contactId}`, { + method: 'DELETE', + }); + } + + async listCompanySegments(companyId: string): Promise { + if (!validateCompanyId(companyId)) { + throw new Error('Invalid company ID provided'); + } + + return this.intercomClient.makeRequest(`/companies/${companyId}/segments`, { + method: 'GET', + }); + } + + async listCompanyTags(companyId: string): Promise { + if (!validateCompanyId(companyId)) { + throw new Error('Invalid company ID provided'); + } + + return this.intercomClient.makeRequest(`/companies/${companyId}/tags`, { + method: 'GET', + }); + } + + async tagCompany(data: { + name: string; + companies: Array<{ + id?: string; + companyId?: string; + }>; + }): Promise { + const validation = validateRequiredFields(data, ['name', 'companies']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + if (!Array.isArray(data.companies) || data.companies.length === 0) { + throw new Error('At least one company must be provided'); + } + + // Validate each company has either id or companyId + for (const company of data.companies) { + if (!company.id && !company.companyId) { + throw new Error('Each company must have either id or company_id'); + } + } + + const payload = { + name: data.name, + companies: data.companies.map((company) => { + const companyData: any = {}; + if (company.id) companyData.id = company.id; + if (company.companyId) companyData.company_id = company.companyId; + return companyData; + }), + }; + + return this.intercomClient.makeRequest('/tags', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async untagCompany(data: { + name: string; + companies: Array<{ + id?: string; + companyId?: string; + }>; + }): Promise { + const validation = validateRequiredFields(data, ['name', 'companies']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + if (!Array.isArray(data.companies) || data.companies.length === 0) { + throw new Error('At least one company must be provided'); + } + + // Validate each company has either id or companyId + for (const company of data.companies) { + if (!company.id && !company.companyId) { + throw new Error('Each company must have either id or company_id'); + } + } + + const payload = { + name: data.name, + companies: data.companies.map((company) => { + const companyData: any = {}; + if (company.id) companyData.id = company.id; + if (company.companyId) companyData.company_id = company.companyId; + return companyData; + }), + untag: true, + }; + + return this.intercomClient.makeRequest('/tags', { + method: 'POST', + body: JSON.stringify(payload), + }); + } +} diff --git a/mcp_servers/intercom/src/tools/handlers/contactHandler.ts b/mcp_servers/intercom/src/tools/handlers/contactHandler.ts new file mode 100644 index 00000000..0103d5b6 --- /dev/null +++ b/mcp_servers/intercom/src/tools/handlers/contactHandler.ts @@ -0,0 +1,260 @@ +import { IntercomClient } from '../../client/intercomClient.js'; +import { + validateContactId, + validateEmail, + validateRequiredFields, + validatePaginationParams, +} from '../../utils/validation.js'; + +export class ContactHandler { + constructor(private intercomClient: IntercomClient) {} + + async listContacts(data: { startingAfter?: string; perPage?: number }): Promise { + const params = new URLSearchParams(); + + if (data.startingAfter) { + params.append('starting_after', data.startingAfter); + } + + if (data.perPage) { + params.append('per_page', data.perPage.toString()); + } + + const queryString = params.toString(); + const endpoint = queryString ? `/contacts?${queryString}` : '/contacts'; + + return this.intercomClient.makeRequest(endpoint, { + method: 'GET', + }); + } + + async getContact(contactId: string): Promise { + if (!validateContactId(contactId)) { + throw new Error('Invalid contact ID provided'); + } + + return this.intercomClient.makeRequest(`/contacts/${contactId}`, { + method: 'GET', + }); + } + + async createContact(data: { + role?: 'user' | 'lead'; + externalId?: string; + email?: string; + phone?: string; + name?: string; + avatar?: string; + signedUpAt?: number; + lastSeenAt?: number; + ownerId?: number; + unsubscribedFromEmails?: boolean; + customAttributes?: Record; + }): Promise { + const payload: any = {}; + + // At least one of email, external_id, or role is required + if (!data.email && !data.externalId && !data.role) { + throw new Error('At least one of email, external_id, or role must be provided'); + } + + if (data.role !== undefined) payload.role = data.role; + if (data.externalId !== undefined) payload.external_id = data.externalId; + if (data.email !== undefined) { + if (!validateEmail(data.email)) { + throw new Error('Invalid email format provided'); + } + payload.email = data.email; + } + if (data.phone !== undefined) payload.phone = data.phone; + if (data.name !== undefined) payload.name = data.name; + if (data.avatar !== undefined) payload.avatar = data.avatar; + if (data.signedUpAt !== undefined) payload.signed_up_at = data.signedUpAt; + if (data.lastSeenAt !== undefined) payload.last_seen_at = data.lastSeenAt; + if (data.ownerId !== undefined) payload.owner_id = data.ownerId; + if (data.unsubscribedFromEmails !== undefined) + payload.unsubscribed_from_emails = data.unsubscribedFromEmails; + if (data.customAttributes !== undefined) payload.custom_attributes = data.customAttributes; + + return this.intercomClient.makeRequest('/contacts', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async updateContact( + contactId: string, + data: { + role?: 'user' | 'lead'; + externalId?: string; + email?: string; + phone?: string; + name?: string; + avatar?: string; + signedUpAt?: number; + lastSeenAt?: number; + ownerId?: number; + unsubscribedFromEmails?: boolean; + customAttributes?: Record; + }, + ): Promise { + if (!validateContactId(contactId)) { + throw new Error('Invalid contact ID provided'); + } + + const payload: any = {}; + + if (data.role !== undefined) payload.role = data.role; + if (data.externalId !== undefined) payload.external_id = data.externalId; + if (data.email !== undefined) { + if (!validateEmail(data.email)) { + throw new Error('Invalid email format provided'); + } + payload.email = data.email; + } + if (data.phone !== undefined) payload.phone = data.phone; + if (data.name !== undefined) payload.name = data.name; + if (data.avatar !== undefined) payload.avatar = data.avatar; + if (data.signedUpAt !== undefined) payload.signed_up_at = data.signedUpAt; + if (data.lastSeenAt !== undefined) payload.last_seen_at = data.lastSeenAt; + if (data.ownerId !== undefined) payload.owner_id = data.ownerId; + if (data.unsubscribedFromEmails !== undefined) + payload.unsubscribed_from_emails = data.unsubscribedFromEmails; + if (data.customAttributes !== undefined) payload.custom_attributes = data.customAttributes; + + return this.intercomClient.makeRequest(`/contacts/${contactId}`, { + method: 'PUT', + body: JSON.stringify(payload), + }); + } + + async deleteContact(contactId: string): Promise { + if (!validateContactId(contactId)) { + throw new Error('Invalid contact ID provided'); + } + + return this.intercomClient.makeRequest(`/contacts/${contactId}`, { + method: 'DELETE', + }); + } + + async searchContacts(data: { + query: any; + pagination?: { + perPage?: number; + startingAfter?: string; + }; + }): Promise { + const payload: any = { + query: data.query, + }; + + if (data.pagination) { + const validation = validatePaginationParams(undefined, data.pagination.perPage); + if (!validation.isValid) { + throw new Error(`Invalid pagination: ${validation.errors.join(', ')}`); + } + + payload.pagination = {}; + if (data.pagination.perPage) payload.pagination.per_page = data.pagination.perPage; + if (data.pagination.startingAfter) + payload.pagination.starting_after = data.pagination.startingAfter; + } + + return this.intercomClient.makeRequest('/contacts/search', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async mergeContact(fromContactId: string, intoContactId: string): Promise { + if (!validateContactId(fromContactId)) { + throw new Error('Invalid "from" contact ID provided'); + } + if (!validateContactId(intoContactId)) { + throw new Error('Invalid "into" contact ID provided'); + } + + const payload = { + from: fromContactId, + into: intoContactId, + }; + + return this.intercomClient.makeRequest('/contacts/merge', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async listContactNotes(contactId: string): Promise { + if (!validateContactId(contactId)) { + throw new Error('Invalid contact ID provided'); + } + + return this.intercomClient.makeRequest(`/contacts/${contactId}/notes`, { + method: 'GET', + }); + } + + async createContactNote( + contactId: string, + data: { + body: string; + adminId?: string; + }, + ): Promise { + if (!validateContactId(contactId)) { + throw new Error('Invalid contact ID provided'); + } + + const validation = validateRequiredFields(data, ['body']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + const payload: any = { + body: data.body, + contact_id: contactId, + }; + + if (data.adminId) { + payload.admin_id = data.adminId; + } + + return this.intercomClient.makeRequest('/notes', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async listContactTags(contactId: string): Promise { + if (!validateContactId(contactId)) { + throw new Error('Invalid contact ID provided'); + } + + return this.intercomClient.makeRequest(`/contacts/${contactId}/tags`, { + method: 'GET', + }); + } + + async addContactTag(contactId: string, tagId: string): Promise { + if (!validateContactId(contactId)) { + throw new Error('Invalid contact ID provided'); + } + + return this.intercomClient.makeRequest(`/contacts/${contactId}/tags`, { + method: 'POST', + body: JSON.stringify({ id: tagId }), + }); + } + + async removeContactTag(contactId: string, tagId: string): Promise { + if (!validateContactId(contactId)) { + throw new Error('Invalid contact ID provided'); + } + + return this.intercomClient.makeRequest(`/contacts/${contactId}/tags/${tagId}`, { + method: 'DELETE', + }); + } +} diff --git a/mcp_servers/intercom/src/tools/handlers/conversationHandler.ts b/mcp_servers/intercom/src/tools/handlers/conversationHandler.ts new file mode 100644 index 00000000..63089c85 --- /dev/null +++ b/mcp_servers/intercom/src/tools/handlers/conversationHandler.ts @@ -0,0 +1,372 @@ +import { IntercomClient } from '../../client/intercomClient.js'; +import { + validateConversationId, + validateContactId, + validatePaginationParams, + validateRequiredFields, +} from '../../utils/validation.js'; + +export class ConversationHandler { + constructor(private intercomClient: IntercomClient) {} + + async listConversations(data: { + startingAfter?: string; + perPage?: number; + displayAs?: 'plaintext'; + }): Promise { + const params = new URLSearchParams(); + + if (data.startingAfter) { + params.append('starting_after', data.startingAfter); + } + + if (data.perPage) { + params.append('per_page', data.perPage.toString()); + } + + if (data.displayAs) { + params.append('display_as', data.displayAs); + } + + const queryString = params.toString(); + const endpoint = queryString ? `/conversations?${queryString}` : '/conversations'; + + return this.intercomClient.makeRequest(endpoint, { + method: 'GET', + }); + } + + async getConversation(conversationId: number, displayAs?: 'plaintext'): Promise { + if (!validateConversationId(conversationId.toString())) { + throw new Error('Invalid conversation ID provided'); + } + + const params = new URLSearchParams(); + if (displayAs) { + params.append('display_as', displayAs); + } + + const queryString = params.toString(); + const endpoint = queryString + ? `/conversations/${conversationId}?${queryString}` + : `/conversations/${conversationId}`; + + return this.intercomClient.makeRequest(endpoint, { + method: 'GET', + }); + } + + async createConversation(data: { + from: { + type: 'lead' | 'user' | 'contact'; + id: string; + }; + body: string; + createdAt?: number; + }): Promise { + const validation = validateRequiredFields(data, ['from', 'body']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + if (!validateContactId(data.from.id)) { + throw new Error('Invalid contact ID in from field'); + } + + const payload: any = { + from: data.from, + body: data.body, + }; + + if (data.createdAt !== undefined) { + payload.created_at = data.createdAt; + } + + return this.intercomClient.makeRequest('/conversations', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async updateConversation( + conversationId: number, + data: { + displayAs?: 'plaintext'; + read?: boolean; + title?: string; + customAttributes?: Record; + }, + ): Promise { + if (!validateConversationId(conversationId.toString())) { + throw new Error('Invalid conversation ID provided'); + } + + const payload: any = {}; + + if (data.read !== undefined) payload.read = data.read; + if (data.title !== undefined) payload.title = data.title; + if (data.customAttributes !== undefined) payload.custom_attributes = data.customAttributes; + + const params = new URLSearchParams(); + if (data.displayAs) { + params.append('display_as', data.displayAs); + } + + const queryString = params.toString(); + const endpoint = queryString + ? `/conversations/${conversationId}?${queryString}` + : `/conversations/${conversationId}`; + + return this.intercomClient.makeRequest(endpoint, { + method: 'PUT', + body: JSON.stringify(payload), + }); + } + + async deleteConversation(conversationId: number): Promise { + if (!validateConversationId(conversationId.toString())) { + throw new Error('Invalid conversation ID provided'); + } + + return this.intercomClient.makeRequest(`/conversations/${conversationId}`, { + method: 'DELETE', + }); + } + + async searchConversations(data: { + query: any; + pagination?: { + perPage?: number; + startingAfter?: string; + }; + }): Promise { + const payload: any = { + query: data.query, + }; + + if (data.pagination) { + const validation = validatePaginationParams(undefined, data.pagination.perPage); + if (!validation.isValid) { + throw new Error(`Invalid pagination: ${validation.errors.join(', ')}`); + } + + payload.pagination = {}; + if (data.pagination.perPage) payload.pagination.per_page = data.pagination.perPage; + if (data.pagination.startingAfter) + payload.pagination.starting_after = data.pagination.startingAfter; + } + + return this.intercomClient.makeRequest('/conversations/search', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async replyToConversation( + conversationId: string, + data: { + messageType?: 'comment' | 'note' | 'quick_reply'; + type?: 'admin' | 'user'; + adminId?: string; + intercomUserId?: string; + body?: string; + attachmentUrls?: string[]; + }, + ): Promise { + if (!validateConversationId(conversationId)) { + throw new Error('Invalid conversation ID provided'); + } + + const payload: any = {}; + + if (data.messageType !== undefined) payload.message_type = data.messageType; + if (data.type !== undefined) payload.type = data.type; + if (data.adminId !== undefined) payload.admin_id = data.adminId; + if (data.intercomUserId !== undefined) payload.intercom_user_id = data.intercomUserId; + if (data.body !== undefined) payload.body = data.body; + if (data.attachmentUrls !== undefined) payload.attachment_urls = data.attachmentUrls; + + return this.intercomClient.makeRequest(`/conversations/${conversationId}/reply`, { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async manageConversation( + conversationId: number, + data: { + messageType: 'close' | 'snoozed' | 'open' | 'assignment'; + adminId: string; + assigneeId?: string; + type?: 'admin' | 'team'; + body?: string; + snoozedUntil?: number; + }, + ): Promise { + if (!validateConversationId(conversationId.toString())) { + throw new Error('Invalid conversation ID provided'); + } + + const validation = validateRequiredFields(data, ['messageType', 'adminId']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + const payload: any = { + message_type: data.messageType, + admin_id: data.adminId, + }; + + if (data.assigneeId !== undefined) payload.assignee_id = data.assigneeId; + if (data.type !== undefined) payload.type = data.type; + if (data.body !== undefined) payload.body = data.body; + if (data.snoozedUntil !== undefined) payload.snoozed_until = data.snoozedUntil; + + return this.intercomClient.makeRequest(`/conversations/${conversationId}/reply`, { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async attachContactToConversation( + conversationId: string, + data: { + adminId: string; + customer: { + intercomUserId: string; + }; + }, + ): Promise { + if (!validateConversationId(conversationId)) { + throw new Error('Invalid conversation ID provided'); + } + + const validation = validateRequiredFields(data, ['adminId', 'customer']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + if (!validateContactId(data.customer.intercomUserId)) { + throw new Error('Invalid intercom user ID provided'); + } + + const payload = { + admin_id: data.adminId, + customer: { + intercom_user_id: data.customer.intercomUserId, + }, + }; + + return this.intercomClient.makeRequest(`/conversations/${conversationId}/customers`, { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async detachContactFromConversation( + conversationId: string, + contactId: string, + data: { + adminId: string; + customer: { + intercomUserId: string; + }; + }, + ): Promise { + if (!validateConversationId(conversationId)) { + throw new Error('Invalid conversation ID provided'); + } + + if (!validateContactId(contactId)) { + throw new Error('Invalid contact ID provided'); + } + + const validation = validateRequiredFields(data, ['adminId', 'customer']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + const payload = { + admin_id: data.adminId, + customer: { + intercom_user_id: data.customer.intercomUserId, + }, + }; + + return this.intercomClient.makeRequest( + `/conversations/${conversationId}/customers/${contactId}`, + { + method: 'DELETE', + body: JSON.stringify(payload), + }, + ); + } + + async redactConversation(data: { + type: 'conversation_part' | 'source'; + conversationId: string; + conversationPartId?: string; + sourceId?: string; + }): Promise { + const validation = validateRequiredFields(data, ['type', 'conversationId']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + if (!validateConversationId(data.conversationId)) { + throw new Error('Invalid conversation ID provided'); + } + + // Validate conditional required fields + if (data.type === 'conversation_part' && !data.conversationPartId) { + throw new Error('conversation_part_id is required when type is conversation_part'); + } + + if (data.type === 'source' && !data.sourceId) { + throw new Error('source_id is required when type is source'); + } + + const payload: any = { + type: data.type, + conversation_id: data.conversationId, + }; + + if (data.conversationPartId) payload.conversation_part_id = data.conversationPartId; + if (data.sourceId) payload.source_id = data.sourceId; + + return this.intercomClient.makeRequest('/conversations/redact', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async convertConversationToTicket( + conversationId: number, + data: { + ticketTypeId: string; + attributes?: Record; + }, + ): Promise { + if (!validateConversationId(conversationId.toString())) { + throw new Error('Invalid conversation ID provided'); + } + + const validation = validateRequiredFields(data, ['ticketTypeId']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + const payload: any = { + ticket_type_id: data.ticketTypeId, + }; + + if (data.attributes !== undefined) { + payload.attributes = data.attributes; + } + + return this.intercomClient.makeRequest(`/conversations/${conversationId}/convert`, { + method: 'POST', + body: JSON.stringify(payload), + }); + } +} diff --git a/mcp_servers/intercom/src/tools/handlers/index.ts b/mcp_servers/intercom/src/tools/handlers/index.ts new file mode 100644 index 00000000..d7e3b512 --- /dev/null +++ b/mcp_servers/intercom/src/tools/handlers/index.ts @@ -0,0 +1,22 @@ +import { IntercomClient } from '../../client/intercomClient.js'; +import { ContactHandler } from './contactHandler.js'; +import { ConversationHandler } from './conversationHandler.js'; +import { CompanyHandler } from './companyHandler.js'; +import { ArticleHandler } from './articleHandler.js'; +import { MessageHandler } from './messageHandler.js'; +import { TagHandler } from './tagHandler.js'; +import { TeamHandler } from './teamHandler.js'; + +export function createHandlers(intercomClient: IntercomClient) { + return { + contact: new ContactHandler(intercomClient), + conversation: new ConversationHandler(intercomClient), + company: new CompanyHandler(intercomClient), + article: new ArticleHandler(intercomClient), + message: new MessageHandler(intercomClient), + tag: new TagHandler(intercomClient), + team: new TeamHandler(intercomClient), + }; +} + +export type HandlerCollection = ReturnType; diff --git a/mcp_servers/intercom/src/tools/handlers/messageHandler.ts b/mcp_servers/intercom/src/tools/handlers/messageHandler.ts new file mode 100644 index 00000000..f516ba18 --- /dev/null +++ b/mcp_servers/intercom/src/tools/handlers/messageHandler.ts @@ -0,0 +1,189 @@ +import { IntercomClient } from '../../client/intercomClient.js'; +import { + validateMessageId, + validateContactId, + validateRequiredFields, +} from '../../utils/validation.js'; + +export class MessageHandler { + constructor(private intercomClient: IntercomClient) {} + + async createMessage(data: { + messageType: 'in_app' | 'email'; + subject?: string; + body: string; + template?: 'plain' | 'personal'; + from: { + type: 'admin'; + id: number; + }; + to: { + type: 'user' | 'lead'; + id: string; + }; + createdAt?: number; + createConversationWithoutContactReply?: boolean; + }): Promise { + const validation = validateRequiredFields(data, ['messageType', 'body', 'from', 'to']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + // Email messages require subject + if (data.messageType === 'email' && !data.subject) { + throw new Error('Subject is required for email messages'); + } + + // Email messages require template + if (data.messageType === 'email' && !data.template) { + throw new Error('Template is required for email messages'); + } + + if (!validateContactId(data.to.id)) { + throw new Error('Invalid contact ID in "to" field'); + } + + const payload: any = { + message_type: data.messageType, + body: data.body, + from: data.from, + to: data.to, + }; + + if (data.subject !== undefined) payload.subject = data.subject; + if (data.template !== undefined) payload.template = data.template; + if (data.createdAt !== undefined) payload.created_at = data.createdAt; + if (data.createConversationWithoutContactReply !== undefined) { + payload.create_conversation_without_contact_reply = + data.createConversationWithoutContactReply; + } + + return this.intercomClient.makeRequest('/messages', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async listMessages(data: { startingAfter?: string; perPage?: number }): Promise { + const params = new URLSearchParams(); + + if (data.startingAfter) { + params.append('starting_after', data.startingAfter); + } + + if (data.perPage) { + params.append('per_page', data.perPage.toString()); + } + + const queryString = params.toString(); + const endpoint = queryString ? `/messages?${queryString}` : '/messages'; + + return this.intercomClient.makeRequest(endpoint, { + method: 'GET', + }); + } + + async getMessage(messageId: string): Promise { + if (!validateMessageId(messageId)) { + throw new Error('Invalid message ID provided'); + } + + return this.intercomClient.makeRequest(`/messages/${messageId}`, { + method: 'GET', + }); + } + + async createNote(data: { body: string; contactId: string; adminId: string }): Promise { + const validation = validateRequiredFields(data, ['body', 'contactId', 'adminId']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + if (!validateContactId(data.contactId)) { + throw new Error('Invalid contact ID provided'); + } + + const payload = { + body: data.body, + contact_id: data.contactId, + admin_id: data.adminId, + }; + + return this.intercomClient.makeRequest('/notes', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async listNotes(data: { + contactId: string; + startingAfter?: string; + perPage?: number; + }): Promise { + const validation = validateRequiredFields(data, ['contactId']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + if (!validateContactId(data.contactId)) { + throw new Error('Invalid contact ID provided'); + } + + const params = new URLSearchParams(); + params.append('contact_id', data.contactId); + + if (data.startingAfter) { + params.append('starting_after', data.startingAfter); + } + + if (data.perPage) { + params.append('per_page', data.perPage.toString()); + } + + return this.intercomClient.makeRequest(`/notes?${params.toString()}`, { + method: 'GET', + }); + } + + async getNote(noteId: string): Promise { + if (!noteId || typeof noteId !== 'string') { + throw new Error('Invalid note ID provided'); + } + + return this.intercomClient.makeRequest(`/notes/${noteId}`, { + method: 'GET', + }); + } + + async sendUserMessage(data: { + from: { + type: 'lead' | 'user' | 'contact'; + id: string; + }; + body: string; + createdAt?: number; + }): Promise { + const validation = validateRequiredFields(data, ['from', 'body']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + if (!validateContactId(data.from.id)) { + throw new Error('Invalid contact ID in "from" field'); + } + + const payload: any = { + from: data.from, + body: data.body, + }; + + if (data.createdAt !== undefined) { + payload.created_at = data.createdAt; + } + + return this.intercomClient.makeRequest('/conversations', { + method: 'POST', + body: JSON.stringify(payload), + }); + } +} diff --git a/mcp_servers/intercom/src/tools/handlers/tagHandler.ts b/mcp_servers/intercom/src/tools/handlers/tagHandler.ts new file mode 100644 index 00000000..8a1af671 --- /dev/null +++ b/mcp_servers/intercom/src/tools/handlers/tagHandler.ts @@ -0,0 +1,168 @@ +import { IntercomClient } from '../../client/intercomClient.js'; +import { + validateTagId, + validateContactId, + validateRequiredFields, +} from '../../utils/validation.js'; + +export class TagHandler { + constructor(private intercomClient: IntercomClient) {} + + async listTags(): Promise { + return this.intercomClient.makeRequest('/tags', { + method: 'GET', + }); + } + + async getTag(tagId: string): Promise { + if (!validateTagId(tagId)) { + throw new Error('Invalid tag ID provided'); + } + + return this.intercomClient.makeRequest(`/tags/${tagId}`, { + method: 'GET', + }); + } + + async createOrUpdateTag(data: { name: string; id?: string }): Promise { + const validation = validateRequiredFields(data, ['name']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + const payload: any = { + name: data.name, + }; + + if (data.id !== undefined) { + payload.id = data.id; + } + + return this.intercomClient.makeRequest('/tags', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async tagCompanies(data: { + name: string; + companies: Array<{ + id?: string; + companyId?: string; + }>; + }): Promise { + const validation = validateRequiredFields(data, ['name', 'companies']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + if (!Array.isArray(data.companies) || data.companies.length === 0) { + throw new Error('At least one company must be provided'); + } + + // Validate each company has either id or companyId + for (const company of data.companies) { + if (!company.id && !company.companyId) { + throw new Error('Each company must have either id or company_id'); + } + } + + const payload = { + name: data.name, + companies: data.companies.map((company) => { + const companyData: any = {}; + if (company.id) companyData.id = company.id; + if (company.companyId) companyData.company_id = company.companyId; + return companyData; + }), + }; + + return this.intercomClient.makeRequest('/tags', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async untagCompanies(data: { + name: string; + companies: Array<{ + id?: string; + companyId?: string; + }>; + }): Promise { + const validation = validateRequiredFields(data, ['name', 'companies']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + if (!Array.isArray(data.companies) || data.companies.length === 0) { + throw new Error('At least one company must be provided'); + } + + // Validate each company has either id or companyId + for (const company of data.companies) { + if (!company.id && !company.companyId) { + throw new Error('Each company must have either id or company_id'); + } + } + + const payload = { + name: data.name, + companies: data.companies.map((company) => { + const companyData: any = {}; + if (company.id) companyData.id = company.id; + if (company.companyId) companyData.company_id = company.companyId; + return companyData; + }), + untag: true, + }; + + return this.intercomClient.makeRequest('/tags', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async tagUsers(data: { + name: string; + users: Array<{ + id: string; + }>; + }): Promise { + const validation = validateRequiredFields(data, ['name', 'users']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + if (!Array.isArray(data.users) || data.users.length === 0) { + throw new Error('At least one user must be provided'); + } + + // Validate each user has a valid ID + for (const user of data.users) { + if (!user.id || !validateContactId(user.id)) { + throw new Error('Each user must have a valid ID'); + } + } + + const payload = { + name: data.name, + users: data.users, + }; + + return this.intercomClient.makeRequest('/tags', { + method: 'POST', + body: JSON.stringify(payload), + }); + } + + async deleteTag(tagId: string): Promise { + if (!validateTagId(tagId)) { + throw new Error('Invalid tag ID provided'); + } + + return this.intercomClient.makeRequest(`/tags/${tagId}`, { + method: 'DELETE', + }); + } +} diff --git a/mcp_servers/intercom/src/tools/handlers/teamHandler.ts b/mcp_servers/intercom/src/tools/handlers/teamHandler.ts new file mode 100644 index 00000000..6ba38add --- /dev/null +++ b/mcp_servers/intercom/src/tools/handlers/teamHandler.ts @@ -0,0 +1,92 @@ +import { IntercomClient } from '../../client/intercomClient.js'; +import { validateTeamId, validateAdminId, validateRequiredFields } from '../../utils/validation.js'; + +export class TeamHandler { + constructor(private intercomClient: IntercomClient) {} + + async listTeams(): Promise { + return this.intercomClient.makeRequest('/teams', { + method: 'GET', + }); + } + + async getTeam(teamId: string): Promise { + if (!validateTeamId(teamId)) { + throw new Error('Invalid team ID provided'); + } + + return this.intercomClient.makeRequest(`/teams/${teamId}`, { + method: 'GET', + }); + } + + async listAdmins(): Promise { + return this.intercomClient.makeRequest('/admins', { + method: 'GET', + }); + } + + async getAdmin(adminId: number): Promise { + if (!validateAdminId(adminId.toString())) { + throw new Error('Invalid admin ID provided'); + } + + return this.intercomClient.makeRequest(`/admins/${adminId}`, { + method: 'GET', + }); + } + + async getCurrentAdmin(): Promise { + return this.intercomClient.makeRequest('/me', { + method: 'GET', + }); + } + + async setAdminAway( + adminId: number, + data: { + awayModeEnabled: boolean; + awayModeReassign: boolean; + }, + ): Promise { + if (!validateAdminId(adminId.toString())) { + throw new Error('Invalid admin ID provided'); + } + + const validation = validateRequiredFields(data, ['awayModeEnabled', 'awayModeReassign']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + const payload = { + away_mode_enabled: data.awayModeEnabled, + away_mode_reassign: data.awayModeReassign, + }; + + return this.intercomClient.makeRequest(`/admins/${adminId}/away`, { + method: 'PUT', + body: JSON.stringify(payload), + }); + } + + async listAdminActivityLogs(data: { + createdAtAfter: string; + createdAtBefore?: string; + }): Promise { + const validation = validateRequiredFields(data, ['createdAtAfter']); + if (!validation.isValid) { + throw new Error(`Missing required fields: ${validation.missingFields.join(', ')}`); + } + + const params = new URLSearchParams(); + params.append('created_at_after', data.createdAtAfter); + + if (data.createdAtBefore) { + params.append('created_at_before', data.createdAtBefore); + } + + return this.intercomClient.makeRequest(`/admins/activity_logs?${params.toString()}`, { + method: 'GET', + }); + } +} diff --git a/mcp_servers/intercom/src/tools/index.ts b/mcp_servers/intercom/src/tools/index.ts new file mode 100644 index 00000000..fe14d3aa --- /dev/null +++ b/mcp_servers/intercom/src/tools/index.ts @@ -0,0 +1,19 @@ +export * from './definitions/index.js'; +export * from './handlers/index.js'; + +import type { IntercomToolName, AllIntercomTools } from './definitions/index.js'; +import { createHandlers, type HandlerCollection } from './handlers/index.js'; +import { IntercomClient } from '../client/intercomClient.js'; + +/** + * Create a complete Intercom tools instance with both handlers and tool definitions + */ +export function createIntercomTools(intercomClient: IntercomClient) { + const handlers = createHandlers(intercomClient); + + return { + handlers, + }; +} + +export type { IntercomToolName, AllIntercomTools, HandlerCollection }; diff --git a/mcp_servers/intercom/src/transport/httpTransport.ts b/mcp_servers/intercom/src/transport/httpTransport.ts new file mode 100644 index 00000000..765845e2 --- /dev/null +++ b/mcp_servers/intercom/src/transport/httpTransport.ts @@ -0,0 +1,75 @@ +import express, { Request, Response, Router } from 'express'; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; +import { IntercomClient, asyncLocalStorage } from '../client/intercomClient.js'; +import { validateIntercomToken } from '../utils/validation.js'; +import { createErrorResponse } from '../utils/errors.js'; +import { getIntercomMcpServer } from '../server.js'; + +export class HttpTransport { + private router: Router; + + constructor() { + this.router = express.Router(); + this.setupRoutes(); + } + + private setupRoutes(): void { + this.router.post('/mcp', this.handleMcpRequest.bind(this)); + this.router.get('/mcp', this.handleGetMcp.bind(this)); + this.router.delete('/mcp', this.handleDeleteMcp.bind(this)); + } + + private async handleMcpRequest(req: Request, res: Response): Promise { + const accessToken = + process.env.INTERCOM_ACCESS_TOKEN || (req.headers['x-auth-token'] as string); + + if (!accessToken) { + res.status(401).json(createErrorResponse(-32001, 'Missing Intercom access token')); + return; + } + + if (!validateIntercomToken(accessToken)) { + res.status(401).json(createErrorResponse(-32001, 'Invalid token format')); + return; + } + + const intercomClient = new IntercomClient(accessToken); + + try { + const transport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + + const server = getIntercomMcpServer(); + await server.connect(transport); + + asyncLocalStorage.run({ intercomClient }, async () => { + await transport.handleRequest(req, res, req.body); + }); + + res.on('close', () => { + console.log('Request closed'); + transport.close(); + }); + } catch (error: any) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res + .status(500) + .json(createErrorResponse(-32603, `Internal server error: ${error.message}`)); + } + } + } + + private async handleGetMcp(_req: Request, res: Response): Promise { + res.status(405).json(createErrorResponse(-32000, 'Method not allowed')); + } + + private async handleDeleteMcp(_req: Request, res: Response): Promise { + res.status(405).json(createErrorResponse(-32000, 'Method not allowed')); + } + + getRouter(): Router { + return this.router; + } +} diff --git a/mcp_servers/intercom/src/transport/sseTransport.ts b/mcp_servers/intercom/src/transport/sseTransport.ts new file mode 100644 index 00000000..714da4b6 --- /dev/null +++ b/mcp_servers/intercom/src/transport/sseTransport.ts @@ -0,0 +1,128 @@ +import express, { Request, Response, Router } from 'express'; +import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js'; +import { IntercomClient, asyncLocalStorage } from '../client/intercomClient.js'; +import { validateIntercomToken } from '../utils/validation.js'; +import { getIntercomMcpServer } from '../server.js'; + +export class SSETransport { + private router: Router; + private transports = new Map(); + + constructor() { + this.router = express.Router(); + this.setupRoutes(); + } + + private setupRoutes(): void { + this.router.get('/sse', this.handleSseConnection.bind(this)); + this.router.post('/messages', this.handleMessages.bind(this)); + this.router.delete('/sse/:sessionId', this.handleDeleteSession.bind(this)); + this.router.get('/sse/status', this.handleStatus.bind(this)); + } + + private async handleSseConnection(req: Request, res: Response): Promise { + try { + const accessToken = + process.env.INTERCOM_ACCESS_TOKEN || (req.headers['x-auth-token'] as string); + + if (!accessToken) { + res.status(401).json({ error: 'Missing Intercom access token' }); + return; + } + + const transport = new SSEServerTransport('/messages', res); + + res.on('close', async () => { + console.log(`SSE connection closed for session: ${transport.sessionId}`); + try { + this.transports.delete(transport.sessionId); + await transport.close(); + } catch (error) { + console.error('Error during SSE cleanup:', error); + } + }); + + res.on('error', (error) => { + console.error(`SSE connection error for session ${transport.sessionId}:`, error); + this.transports.delete(transport.sessionId); + }); + + this.transports.set(transport.sessionId, transport); + + const server = getIntercomMcpServer(); + await server.connect(transport); + + console.log(`SSE connection established with session: ${transport.sessionId}`); + } catch (error) { + console.error('Error establishing SSE connection:', error); + if (!res.headersSent) { + res.status(500).json({ error: 'Failed to establish SSE connection' }); + } + } + } + + private async handleMessages(req: Request, res: Response): Promise { + try { + const sessionId = req.query.sessionId as string; + + if (!sessionId) { + res.status(400).json({ error: 'Missing sessionId parameter' }); + return; + } + + const transport = this.transports.get(sessionId); + if (!transport) { + res.status(404).json({ error: 'Transport not found or session expired' }); + return; + } + + const accessToken = + process.env.INTERCOM_ACCESS_TOKEN || (req.headers['x-auth-token'] as string); + + if (!accessToken || !validateIntercomToken(accessToken)) { + res.status(401).json({ error: 'Invalid or missing access token' }); + return; + } + + const intercomClient = new IntercomClient(accessToken); + + await asyncLocalStorage.run({ intercomClient }, async () => { + await transport.handlePostMessage(req, res); + }); + } catch (error: any) { + console.error('Error handling message:', error); + if (!res.headersSent) { + res.status(500).json({ error: `Message handling failed: ${error.message}` }); + } + } + } + + private async handleDeleteSession(req: Request, res: Response): Promise { + const sessionId = req.params.sessionId; + const transport = this.transports.get(sessionId); + + if (transport) { + try { + await transport.close(); + this.transports.delete(sessionId); + res.status(200).json({ message: 'Session terminated' }); + } catch (error) { + res.status(500).json({ error: 'Failed to terminate session' }); + } + } else { + res.status(404).json({ error: 'Session not found' }); + } + } + + private handleStatus(_req: Request, res: Response): void { + res.json({ + activeConnections: this.transports.size, + sessions: Array.from(this.transports.keys()), + timestamp: new Date().toISOString(), + }); + } + + getRouter(): Router { + return this.router; + } +} diff --git a/mcp_servers/intercom/src/utils/errors.ts b/mcp_servers/intercom/src/utils/errors.ts new file mode 100644 index 00000000..ac69b02f --- /dev/null +++ b/mcp_servers/intercom/src/utils/errors.ts @@ -0,0 +1,80 @@ +export interface JsonRpcError { + jsonrpc: '2.0'; + error: { + code: number; + message: string; + data?: any; + }; + id: null; +} + +export function createErrorResponse(code: number, message: string, data?: any): JsonRpcError { + return { + jsonrpc: '2.0', + error: { + code, + message, + data, + }, + id: null, + }; +} + +// Intercom-specific error codes and helpers +export const IntercomErrorCodes = { + // Authentication errors + UNAUTHORIZED: 401, + FORBIDDEN: 403, + + // Resource errors + NOT_FOUND: 404, + CONFLICT: 409, + + // Rate limiting + RATE_LIMITED: 429, + + // Server errors + INTERNAL_SERVER_ERROR: 500, + SERVICE_UNAVAILABLE: 503, + + // Client errors + BAD_REQUEST: 400, + UNPROCESSABLE_ENTITY: 422, + + // Custom MCP errors + INVALID_CONTACT_ID: 1001, + INVALID_CONVERSATION_ID: 1002, + INVALID_COMPANY_ID: 1003, + INVALID_MESSAGE_ID: 1004, + INVALID_TAG_ID: 1005, + INVALID_ARTICLE_ID: 1006, + MISSING_REQUIRED_FIELD: 1007, + INVALID_SEARCH_QUERY: 1008, +} as const; + +// Helper functions for common Intercom errors +export function createUnauthorizedError(message = 'Invalid Intercom access token'): JsonRpcError { + return createErrorResponse(IntercomErrorCodes.UNAUTHORIZED, message); +} + +export function createNotFoundError(resource: string, id?: string): JsonRpcError { + const message = id ? `${resource} with ID ${id} not found` : `${resource} not found`; + return createErrorResponse(IntercomErrorCodes.NOT_FOUND, message); +} + +export function createRateLimitError(message = 'Intercom API rate limit exceeded'): JsonRpcError { + return createErrorResponse(IntercomErrorCodes.RATE_LIMITED, message); +} + +export function createValidationError(field: string, message?: string): JsonRpcError { + const errorMessage = message || `Invalid or missing required field: ${field}`; + return createErrorResponse(IntercomErrorCodes.MISSING_REQUIRED_FIELD, errorMessage, { field }); +} + +export function createIntercomApiError( + status: number, + statusText: string, + details?: any, +): JsonRpcError { + return createErrorResponse(status, `Intercom API error: ${status} ${statusText}`, details); +} diff --git a/mcp_servers/intercom/src/utils/validation.ts b/mcp_servers/intercom/src/utils/validation.ts new file mode 100644 index 00000000..93d887bd --- /dev/null +++ b/mcp_servers/intercom/src/utils/validation.ts @@ -0,0 +1,113 @@ +export function validateIntercomToken(token: string): boolean { + if (!token) return false; + + // Intercom tokens are typically Bearer tokens or direct access tokens + return token.startsWith('Bearer ') || token.match(/^[a-zA-Z0-9_-]+$/) !== null; +} + +export function validateContactId(contactId: string): boolean { + return typeof contactId === 'string' && contactId.length > 0; +} + +export function validateConversationId(conversationId: string): boolean { + return typeof conversationId === 'string' && conversationId.length > 0; +} + +export function validateCompanyId(companyId: string): boolean { + return typeof companyId === 'string' && companyId.length > 0; +} + +export function validateMessageId(messageId: string): boolean { + return typeof messageId === 'string' && messageId.length > 0; +} + +export function validateTagId(tagId: string): boolean { + return typeof tagId === 'string' && tagId.length > 0; +} + +export function validateArticleId(articleId: string): boolean { + return typeof articleId === 'string' && articleId.length > 0; +} + +export function validateTeamId(teamId: string): boolean { + return typeof teamId === 'string' && teamId.length > 0; +} + +export function validateAdminId(adminId: string): boolean { + return typeof adminId === 'string' && adminId.length > 0; +} + +export function validateEmail(email: string): boolean { + const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/; + return typeof email === 'string' && emailRegex.test(email); +} + +export function validateUrl(url: string): boolean { + try { + new URL(url); + return true; + } catch { + return false; + } +} + +export function validatePaginationParams( + page?: number, + perPage?: number, +): { isValid: boolean; errors: string[] } { + const errors: string[] = []; + + if (page !== undefined) { + if (typeof page !== 'number' || page < 1) { + errors.push('Page must be a positive number starting from 1'); + } + } + + if (perPage !== undefined) { + if (typeof perPage !== 'number' || perPage < 1 || perPage > 150) { + errors.push('Per page must be between 1 and 150'); + } + } + + return { + isValid: errors.length === 0, + errors, + }; +} + +export function validateSearchQuery(query: string): boolean { + return typeof query === 'string' && query.trim().length > 0 && query.length <= 500; +} + +export function validateRequiredFields( + data: Record, + requiredFields: string[], +): { isValid: boolean; missingFields: string[] } { + const missingFields: string[] = []; + + for (const field of requiredFields) { + if (data[field] === undefined || data[field] === null || data[field] === '') { + missingFields.push(field); + } + } + + return { + isValid: missingFields.length === 0, + missingFields, + }; +} + +export function validateIntercomTimestamp(timestamp: number | string): boolean { + if (typeof timestamp === 'string') { + timestamp = parseInt(timestamp, 10); + } + + if (isNaN(timestamp)) return false; + + // Check if it's a valid Unix timestamp (between 1970 and reasonable future date) + const date = new Date(timestamp * 1000); + const now = new Date(); + const futureLimit = new Date(now.getFullYear() + 10, now.getMonth(), now.getDate()); + + return date.getTime() > 0 && date < futureLimit; +} diff --git a/mcp_servers/intercom/tsconfig.json b/mcp_servers/intercom/tsconfig.json new file mode 100644 index 00000000..11f7b8b9 --- /dev/null +++ b/mcp_servers/intercom/tsconfig.json @@ -0,0 +1,30 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "outDir": "./dist", + "rootDir": "./src", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "removeComments": false, + "noImplicitAny": true, + "noImplicitReturns": true, + "noImplicitThis": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "exactOptionalPropertyTypes": true, + "resolveJsonModule": true, + "isolatedModules": true, + "allowImportingTsExtensions": false, + "noEmit": false, + "lib": ["ES2022"] + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "dist", "tests", "**/*.test.ts", "**/*.spec.ts"] +} diff --git a/mcp_servers/jira/Dockerfile b/mcp_servers/jira/Dockerfile new file mode 100644 index 00000000..71fcdd96 --- /dev/null +++ b/mcp_servers/jira/Dockerfile @@ -0,0 +1,32 @@ +FROM node:22.12-alpine AS builder + +# Set the working directory inside the container +WORKDIR /app + +# Copy package.json and package-lock.json to install dependencies +COPY mcp_servers/jira/package.json mcp_servers/jira/package-lock.json ./ + +# Install dependencies (ignoring scripts to prevent running the prepare script) +RUN npm install --ignore-scripts + +# Copy the rest of the application source code +COPY mcp_servers/jira . + +# Build the application using TypeScript +RUN npm run build + +FROM node:22-alpine AS release + +COPY --from=builder /app/build /app/build +COPY --from=builder /app/package.json /app/package.json +COPY --from=builder /app/package-lock.json /app/package-lock.json + +ENV NODE_ENV=production + +EXPOSE 5000 + +WORKDIR /app + +RUN npm ci --ignore-scripts --omit-dev + +ENTRYPOINT ["node", "build/index.js"] \ No newline at end of file diff --git a/mcp_servers/jira/README.md b/mcp_servers/jira/README.md new file mode 100644 index 00000000..aecc1b53 --- /dev/null +++ b/mcp_servers/jira/README.md @@ -0,0 +1,78 @@ +# Jira MCP Server + +A Model Context Protocol (MCP) server for Jira integration. Manage issues, projects, and workflows using Atlassian Jira's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Jira with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("JIRA", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/jira-mcp-server:latest + + +# Run Jira MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/jira-mcp-server:latest + + +# Run Jira MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_jira_api_token_here"}' \ + ghcr.io/klavis-ai/jira-mcp-server:latest +``` + +**OAuth Setup:** Jira requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Issue Management**: Create, read, update, and transition Jira issues +- **Project Operations**: Manage projects and project configurations +- **Workflow Management**: Handle issue workflows and status transitions +- **Search & Filter**: Search issues using JQL and saved filters +- **User Management**: Get user information and project permissions + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/jira/index.ts b/mcp_servers/jira/index.ts new file mode 100644 index 00000000..aa0c0d67 --- /dev/null +++ b/mcp_servers/jira/index.ts @@ -0,0 +1,1564 @@ +#!/usr/bin/env node + +import express, { Request, Response } from 'express'; +import { Server } from "@modelcontextprotocol/sdk/server/index.js"; +import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js"; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; +import { + CallToolRequest, + CallToolRequestSchema, + ListToolsRequestSchema, + Tool, +} from "@modelcontextprotocol/sdk/types.js"; +import { z } from 'zod'; +import { AsyncLocalStorage } from 'async_hooks'; +import dotenv from 'dotenv'; +import fetch, { RequestInit } from 'node-fetch'; + +// Load environment variables +dotenv.config(); + +// Default fields for Jira reads +const DEFAULT_READ_JIRA_FIELDS = [ + "summary", + "status", + "assignee", + "issuetype", + "priority", + "created", + "updated", + "description", + "labels" +]; + +// Define interfaces for Jira API client +interface JiraClient { + baseUrl: string; + cloudId: string; + authToken: string; + fetch: (path: string, options?: RequestInit) => Promise; +} + +// Type definitions for tool arguments +interface SearchIssuesArgs { + jql: string; + maxResults?: number; + fields?: string[]; +} + +interface CreateIssueArgs { + projectKey: string; + issueType: string; + summary: string; + description: string; + priority?: string; + assignee?: string; + labels?: string[]; + components?: string[]; + customFields?: Record; +} + +interface GetIssueArgs { + issueKey: string; + fields?: string[]; +} + +interface UpdateIssueArgs { + issueKey: string; + summary?: string; + description?: string; + status?: string; + priority?: string; + assignee?: string; + labels?: string[]; + customFields?: Record; +} + +interface AddCommentArgs { + issueKey: string; + comment: string; +} + +// Type definitions for tool arguments +interface JiraGetIssueArgs { + issue_key: string; + fields?: string; + expand?: string; + comment_limit?: number; + properties?: string; + update_history?: boolean; +} + +interface JiraSearchArgs { + jql: string; + fields?: string; + limit?: number; + startAt?: number; + projects_filter?: string; +} + +interface JiraSearchFieldsArgs { + keyword?: string; + limit?: number; + refresh?: boolean; +} + +interface JiraGetProjectIssuesArgs { + project_key: string; + limit?: number; + startAt?: number; +} + +interface JiraGetEpicIssuesArgs { + epic_key: string; + limit?: number; + startAt?: number; +} + +interface JiraGetSprintsFromBoardArgs { + board_id: string; + state?: string; + startAt?: number; + limit?: number; +} + +interface JiraCreateSprintArgs { + board_id: string; + sprint_name: string; + start_date: string; + end_date: string; + goal?: string; +} + +interface JiraGetSprintIssuesArgs { + sprint_id: string; + fields?: string; + startAt?: number; + limit?: number; +} + +interface JiraUpdateSprintArgs { + sprint_id: string; + sprint_name?: string; + state?: string; + start_date?: string; + end_date?: string; + goal?: string; +} + +interface JiraCreateIssueArgs { + project_key: string; + summary: string; + issue_type: string; + assignee?: string; + description?: string; + components?: string; + additional_fields?: string; +} + +interface JiraUpdateIssueArgs { + issue_key: string; + fields: string; + additional_fields?: string; + attachments?: string; +} + +interface JiraDeleteIssueArgs { + issue_key: string; +} + +interface JiraAddCommentArgs { + issue_key: string; + comment: string; +} + +// Create AsyncLocalStorage for request context +const asyncLocalStorage = new AsyncLocalStorage<{ + authToken: string; +}>(); + +function extractAccessToken(req: Request): string { + let authData = process.env.AUTH_DATA; + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: Jira access token is missing. Provide it via AUTH_DATA env var or x-auth-data header with access_token field.'); + return ''; + } + + const authDataJson = JSON.parse(authData); + return authDataJson.access_token ?? ''; +} + +// Helper function to get Jira client from async local storage +async function getJiraClient(): Promise { + const store = asyncLocalStorage.getStore(); + if (!store) { + throw new Error('Auth token not found in AsyncLocalStorage'); + } + return await createJiraClient(store.authToken); +} + +// Create a Jira API client +async function createJiraClient(authToken: string): Promise { + // First, fetch the accessible resources to get the correct baseUrl + const accessibleResourcesUrl = 'https://api.atlassian.com/oauth/token/accessible-resources'; + + try { + const response = await fetch(accessibleResourcesUrl, { + headers: { + 'Authorization': `Bearer ${authToken}`, + 'Accept': 'application/json', + }, + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error(`Failed to fetch accessible resources (${response.status}): ${errorText}`); + } + + // Define the type for the resources + interface JiraResource { + id: string; + name: string; + url: string; + scopes: string[]; + avatarUrl: string; + } + + const resources = await response.json() as JiraResource[]; + + // If no resources are found, throw an error + if (!resources || resources.length === 0) { + throw new Error('No accessible Jira resources found for this user'); + } + + // Use the first resource's cloud ID + const cloudId = resources[0].id; + // Store the site URL as well for reference + const siteUrl = resources[0].url; + + return { + baseUrl: siteUrl, + cloudId, + authToken, + async fetch(path: string, options: RequestInit = {}): Promise { + // Construct URL using the proper format + const url = path.startsWith('http') + ? path + : `https://api.atlassian.com/ex/jira/${cloudId}${path.startsWith('/') ? path : '/' + path}`; + + const headers: Record = { + ...options.headers as Record, + 'Content-Type': 'application/json', + 'Accept': 'application/json', + 'Authorization': `Bearer ${authToken}` + }; + + const response = await fetch(url, { + ...options, + headers, + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error(`Jira API error (${response.status}): ${errorText}`); + } + + // Handle 204 No Content responses + if (response.status === 204) { + return {} as T; + } + + return response.json() as Promise; + }, + }; + } catch (error) { + console.error('Error creating Jira client:', error); + throw error; + } +} + +// Tool definitions +const searchIssuesTool: Tool = { + name: "jira_search_issues", + description: "Search for Jira issues using JQL", + inputSchema: { + type: "object", + properties: { + jql: { + type: "string", + description: "JQL query string to search for issues", + }, + maxResults: { + type: "number", + description: "Maximum number of results to return (default: 20)", + }, + fields: { + type: "array", + items: { + type: "string", + }, + description: "Fields to include in the response", + }, + }, + required: ["jql"], + }, + annotations: { + category: "JIRA_ISSUE", + readOnlyHint: true, + }, +}; + +const createIssueTool: Tool = { + name: "jira_create_issue", + description: "Create a new Jira issue with optional Epic link or parent for subtasks", + inputSchema: { + type: "object", + properties: { + project_key: { + type: "string", + description: "The JIRA project key (e.g. 'PROJ', 'DEV', 'SUPPORT'). This is the prefix of issue keys in your project. Never assume what it might be, always ask the user.", + }, + summary: { + type: "string", + description: "Summary/title of the issue", + }, + issue_type: { + type: "string", + description: "Issue type (e.g. 'Task', 'Bug', 'Story', 'Epic', 'Subtask'). The available types depend on your project configuration. For subtasks, use 'Subtask' (not 'Sub-task') and include parent in additional_fields.", + }, + assignee: { + type: "string", + description: "Assignee of the ticket (accountID, full name or e-mail)", + }, + description: { + type: "string", + description: "Issue description", + default: "", + }, + components: { + type: "string", + description: "Comma-separated list of component names to assign (e.g., 'Frontend,API')", + default: "", + }, + additional_fields: { + type: "string", + description: "Optional JSON string of additional fields to set. Examples:\n" + + '- Set priority: {"priority": {"name": "High"}}\n' + + '- Add labels: {"labels": ["frontend", "urgent"]}\n' + + '- Link to parent (for any issue type): {"parent": "PROJ-123"}\n' + + '- Set Fix Version/s: {"fixVersions": [{"id": "10020"}]}\n' + + '- Custom fields: {"customfield_10010": "value"}', + default: "{}", + }, + }, + required: ["project_key", "summary", "issue_type"], + }, + annotations: { + category: "JIRA_ISSUE", + }, +}; + +const getIssueTool: Tool = { + name: "jira_get_issue", + description: "Get details of a specific Jira issue including its Epic links and relationship information", + inputSchema: { + type: "object", + properties: { + issue_key: { + type: "string", + description: "Jira issue key (e.g., 'PROJ-123')", + }, + fields: { + type: "string", + description: "Fields to return. Can be a comma-separated list (e.g., 'summary,status,customfield_10010'), '*all' for all fields (including custom fields), or omitted for essential fields only", + default: DEFAULT_READ_JIRA_FIELDS.join(","), + }, + expand: { + type: "string", + description: "Optional fields to expand. Examples: 'renderedFields' (for rendered content), 'transitions' (for available status transitions), 'changelog' (for history)", + }, + comment_limit: { + type: "integer", + description: "Maximum number of comments to include (0 or null for no comments)", + minimum: 0, + maximum: 100, + default: 10, + }, + properties: { + type: "string", + description: "A comma-separated list of issue properties to return", + }, + update_history: { + type: "boolean", + description: "Whether to update the issue view history for the requesting user", + default: true, + }, + }, + required: ["issue_key"], + }, + annotations: { + category: "JIRA_ISSUE", + readOnlyHint: true, + }, +}; + +const updateIssueTool: Tool = { + name: "jira_update_issue", + description: "Update an existing Jira issue including changing status, adding Epic links, updating fields, etc.", + inputSchema: { + type: "object", + properties: { + issue_key: { + type: "string", + description: "Jira issue key (e.g., 'PROJ-123')", + }, + fields: { + type: "string", + description: "A valid JSON object of fields to update as a string. Example: '{\"summary\": \"New title\", \"description\": \"Updated description\", \"priority\": {\"name\": \"High\"}, \"assignee\": \"john.doe\"}'", + }, + additional_fields: { + type: "string", + description: "Optional JSON string of additional fields to update. Use this for custom fields or more complex updates.", + default: "{}", + }, + attachments: { + type: "string", + description: "Optional JSON string or comma-separated list of file paths to attach to the issue. Example: \"/path/to/file1.txt,/path/to/file2.txt\" or \"[\"/path/to/file1.txt\",\"/path/to/file2.txt\"]\"", + }, + }, + required: ["issue_key", "fields"], + }, + annotations: { + category: "JIRA_ISSUE", + }, +}; + +const addCommentTool: Tool = { + name: "jira_add_comment", + description: "Add a comment to a Jira issue", + inputSchema: { + type: "object", + properties: { + issue_key: { + type: "string", + description: "Jira issue key (e.g., 'PROJ-123')", + }, + comment: { + type: "string", + description: "Comment text in Markdown format", + }, + }, + required: ["issue_key", "comment"], + }, + annotations: { + category: "JIRA_COMMENT", + }, +}; + +const searchTool: Tool = { + name: "jira_search", + description: "Search Jira issues using JQL (Jira Query Language)", + inputSchema: { + type: "object", + properties: { + jql: { + type: "string", + description: "JQL query string (Jira Query Language). Examples:\n" + + '- Find Epics: "issuetype = Epic AND project = PROJ"\n' + + '- Find issues in Epic: "parent = PROJ-123"\n' + + "- Find by status: \"status = 'In Progress' AND project = PROJ\"\n" + + '- Find by assignee: "assignee = currentUser()"\n' + + '- Find recently updated: "updated >= -7d AND project = PROJ"\n' + + '- Find by label: "labels = frontend AND project = PROJ"\n' + + '- Find by priority: "priority = High AND project = PROJ"', + }, + fields: { + type: "string", + description: "Comma-separated fields to return in the results. Use '*all' for all fields, or specify individual fields like 'summary,status,assignee,priority'", + default: DEFAULT_READ_JIRA_FIELDS.join(","), + }, + limit: { + type: "number", + description: "Maximum number of results (1-50)", + default: 10, + minimum: 1, + maximum: 50, + }, + startAt: { + type: "number", + description: "Starting index for pagination (0-based)", + default: 0, + minimum: 0, + }, + projects_filter: { + type: "string", + description: "Comma-separated list of project keys to filter results by. Overrides the environment variable JIRA_PROJECTS_FILTER if provided.", + }, + }, + required: ["jql"], + }, + annotations: { + category: "JIRA_SEARCH", + readOnlyHint: true, + }, +}; + +const searchFieldsTool: Tool = { + name: "jira_search_fields", + description: "Search Jira fields by keyword with fuzzy match", + inputSchema: { + type: "object", + properties: { + keyword: { + type: "string", + description: "Keyword for fuzzy search. If left empty, lists the first 'limit' available fields in their default order.", + default: "", + }, + limit: { + type: "number", + description: "Maximum number of results", + default: 10, + minimum: 1, + }, + refresh: { + type: "boolean", + description: "Whether to force refresh the field list", + default: false, + }, + }, + required: [], + }, + annotations: { + category: "JIRA_FIELD", + readOnlyHint: true, + }, +}; + +const getProjectIssuesTool: Tool = { + name: "jira_get_project_issues", + description: "Get all issues for a specific Jira project", + inputSchema: { + type: "object", + properties: { + project_key: { + type: "string", + description: "The project key", + }, + limit: { + type: "number", + description: "Maximum number of results (1-50)", + default: 10, + minimum: 1, + maximum: 50, + }, + startAt: { + type: "number", + description: "Starting index for pagination (0-based)", + default: 0, + minimum: 0, + }, + }, + required: ["project_key"], + }, + annotations: { + category: "JIRA_PROJECT", + readOnlyHint: true, + }, +}; + +const getEpicIssuesTool: Tool = { + name: "jira_get_epic_issues", + description: "Get all issues linked to a specific epic", + inputSchema: { + type: "object", + properties: { + epic_key: { + type: "string", + description: "The key of the epic (e.g., 'PROJ-123')", + }, + limit: { + type: "number", + description: "Maximum number of issues to return (1-50)", + default: 10, + minimum: 1, + maximum: 50, + }, + startAt: { + type: "number", + description: "Starting index for pagination (0-based)", + default: 0, + minimum: 0, + }, + }, + required: ["epic_key"], + }, + annotations: { + category: "JIRA_ISSUE", + readOnlyHint: true, + }, +}; + +const getSprintsFromBoardTool: Tool = { + name: "jira_get_sprints_from_board", + description: "Get jira sprints from board by state", + inputSchema: { + type: "object", + properties: { + board_id: { + type: "string", + description: "The id of board (e.g., '1000')", + }, + state: { + type: "string", + description: "Sprint state (e.g., 'active', 'future', 'closed')", + }, + startAt: { + type: "number", + description: "Starting index for pagination (0-based)", + default: 0, + }, + limit: { + type: "number", + description: "Maximum number of results (1-50)", + default: 10, + minimum: 1, + maximum: 50, + }, + }, + }, + annotations: { + category: "JIRA_SPRINT", + readOnlyHint: true, + }, +}; + +const createSprintTool: Tool = { + name: "jira_create_sprint", + description: "Create Jira sprint for a board", + inputSchema: { + type: "object", + properties: { + board_id: { + type: "string", + description: "The id of board (e.g., '1000')", + }, + sprint_name: { + type: "string", + description: "Name of the sprint (e.g., 'Sprint 1')", + }, + start_date: { + type: "string", + description: "Start time for sprint (ISO 8601 format)", + }, + end_date: { + type: "string", + description: "End time for sprint (ISO 8601 format)", + }, + goal: { + type: "string", + description: "Goal of the sprint", + }, + }, + required: [ + "board_id", + "sprint_name", + "start_date", + "end_date", + ], + }, + annotations: { + category: "JIRA_SPRINT", + }, +}; + +const getSprintIssuesTool: Tool = { + name: "jira_get_sprint_issues", + description: "Get jira issues from sprint", + inputSchema: { + type: "object", + properties: { + sprint_id: { + type: "string", + description: "The id of sprint (e.g., '10001')", + }, + fields: { + type: "string", + description: "Comma-separated fields to return in the results. Use '*all' for all fields, or specify individual fields like 'summary,status,assignee,priority'", + default: DEFAULT_READ_JIRA_FIELDS.join(","), + }, + startAt: { + type: "number", + description: "Starting index for pagination (0-based)", + default: 0, + }, + limit: { + type: "number", + description: "Maximum number of results (1-50)", + default: 10, + minimum: 1, + maximum: 50, + }, + }, + required: ["sprint_id"], + }, + annotations: { + category: "JIRA_SPRINT", + readOnlyHint: true, + }, +}; + +const updateSprintTool: Tool = { + name: "jira_update_sprint", + description: "Update jira sprint", + inputSchema: { + type: "object", + properties: { + sprint_id: { + type: "string", + description: "The id of sprint (e.g., '10001')", + }, + sprint_name: { + type: "string", + description: "Optional: New name for the sprint", + }, + state: { + type: "string", + description: "Optional: New state for the sprint (future|active|closed)", + }, + start_date: { + type: "string", + description: "Optional: New start date for the sprint", + }, + end_date: { + type: "string", + description: "Optional: New end date for the sprint", + }, + goal: { + type: "string", + description: "Optional: New goal for the sprint", + }, + }, + required: ["sprint_id"], + }, + annotations: { + category: "JIRA_SPRINT", + }, +}; + +const deleteIssueTool: Tool = { + name: "jira_delete_issue", + description: "Delete an existing Jira issue", + inputSchema: { + type: "object", + properties: { + issue_key: { + type: "string", + description: "Jira issue key (e.g. PROJ-123)", + }, + }, + required: ["issue_key"], + }, + annotations: { + category: "JIRA_ISSUE", + }, +}; + +const getLinkTypesTool: Tool = { + name: "jira_get_link_types", + description: "Get all available issue link types", + inputSchema: { + type: "object", + properties: {}, + required: [], + }, + annotations: { + category: "JIRA_LINK", + readOnlyHint: true, + }, +}; + +const getJiraMcpServer = () => { + const server = new Server( + { + name: "jira-service", + version: "1.0.0", + }, + { + capabilities: { + tools: {}, + }, + } + ); + + server.setRequestHandler( + ListToolsRequestSchema, + async () => { + return { + tools: [ + searchTool, + getIssueTool, + searchFieldsTool, + getProjectIssuesTool, + getEpicIssuesTool, + getSprintsFromBoardTool, + createSprintTool, + getSprintIssuesTool, + updateSprintTool, + createIssueTool, + updateIssueTool, + deleteIssueTool, + addCommentTool, + getLinkTypesTool, + ], + }; + } + ); + + server.setRequestHandler( + CallToolRequestSchema, + async (request: CallToolRequest) => { + try { + // Validate the request parameters + if (!request.params?.name) { + throw new Error("Missing tool name"); + } + + const jira = await getJiraClient(); + + // Process the tool call based on the tool name + switch (request.params.name) { + case "jira_search": { + const args = request.params.arguments as unknown as JiraSearchArgs; + if (!args.jql) { + throw new Error("Missing required argument: jql"); + } + + const searchParams = new URLSearchParams(); + searchParams.append('jql', args.jql); + + if (args.limit) { + searchParams.append('maxResults', String(args.limit)); + } else { + searchParams.append('maxResults', "10"); + } + + if (args.startAt !== undefined) { + searchParams.append('startAt', String(args.startAt)); + } + + if (args.fields) { + if (args.fields === "*all") { + searchParams.append('fields', "*all"); + } else { + searchParams.append('fields', args.fields); + } + } else { + searchParams.append('fields', DEFAULT_READ_JIRA_FIELDS.join(',')); + } + + // Filter by project if specified + const projectsFilter = args.projects_filter || process.env.JIRA_PROJECTS_FILTER; + if (projectsFilter && !args.jql.toLowerCase().includes("project =")) { + const projects = projectsFilter.split(',').map(p => p.trim()); + let projectCondition = ""; + + if (projects.length === 1) { + projectCondition = `project = ${projects[0]}`; + } else if (projects.length > 1) { + projectCondition = `project in (${projects.join(',')})`; + } + + if (projectCondition) { + args.jql = `${args.jql} AND ${projectCondition}`; + searchParams.set('jql', args.jql); + } + } + + const response = await jira.fetch(`/rest/api/3/search?${searchParams.toString()}`); + + return { + content: [ + { + type: "text", + text: JSON.stringify(response), + }, + ], + }; + } + + case "jira_get_issue": { + const args = request.params.arguments as unknown as JiraGetIssueArgs; + if (!args.issue_key) { + throw new Error("Missing required argument: issue_key"); + } + + const searchParams = new URLSearchParams(); + + if (args.fields) { + if (args.fields === "*all") { + searchParams.append('fields', "*all"); + } else { + searchParams.append('fields', args.fields); + } + } else { + searchParams.append('fields', DEFAULT_READ_JIRA_FIELDS.join(',')); + } + + if (args.expand) { + searchParams.append('expand', args.expand); + } + + if (args.properties) { + searchParams.append('properties', args.properties); + } + + if (args.update_history !== undefined) { + searchParams.append('updateHistory', String(args.update_history)); + } + + const query = searchParams.toString() ? `?${searchParams.toString()}` : ''; + const response = await jira.fetch(`/rest/api/3/issue/${args.issue_key}${query}`); + + // Get comments if comment_limit > 0 + if (args.comment_limit && args.comment_limit > 0) { + const commentsParams = new URLSearchParams(); + commentsParams.append('maxResults', String(args.comment_limit)); + commentsParams.append('orderBy', 'created'); + + const commentsResponse = await jira.fetch( + `/rest/api/3/issue/${args.issue_key}/comment?${commentsParams.toString()}` + ); + + response.comments = commentsResponse.comments; + } + + return { + content: [ + { + type: "text", + text: JSON.stringify(response), + }, + ], + }; + } + + case "jira_search_fields": { + const args = request.params.arguments as unknown as JiraSearchFieldsArgs; + + // Get all fields + const response = await jira.fetch('/rest/api/3/field'); + + // Filter and sort by keyword if provided + let filteredFields = response; + if (args.keyword && args.keyword.trim() !== '') { + const keyword = args.keyword.toLowerCase(); + filteredFields = response.filter((field: any) => { + const name = (field.name || '').toLowerCase(); + const id = (field.id || '').toLowerCase(); + const desc = (field.description || '').toLowerCase(); + + return name.includes(keyword) || id.includes(keyword) || desc.includes(keyword); + }); + } + + // Limit results + const limit = args.limit || 10; + const limitedFields = filteredFields.slice(0, limit); + + return { + content: [ + { + type: "text", + text: JSON.stringify(limitedFields), + }, + ], + }; + } + + case "jira_get_project_issues": { + const args = request.params.arguments as unknown as JiraGetProjectIssuesArgs; + if (!args.project_key) { + throw new Error("Missing required argument: project_key"); + } + + // Use JQL to search for project issues + const jql = `project = ${args.project_key}`; + const searchParams = new URLSearchParams(); + searchParams.append('jql', jql); + searchParams.append('maxResults', String(args.limit || 10)); + searchParams.append('startAt', String(args.startAt || 0)); + searchParams.append('fields', DEFAULT_READ_JIRA_FIELDS.join(',')); + + const response = await jira.fetch(`/rest/api/3/search?${searchParams.toString()}`); + + return { + content: [ + { + type: "text", + text: JSON.stringify(response), + }, + ], + }; + } + + case "jira_get_epic_issues": { + const args = request.params.arguments as unknown as JiraGetEpicIssuesArgs; + if (!args.epic_key) { + throw new Error("Missing required argument: epic_key"); + } + + // First get the Epic to confirm it exists and is of type Epic + const epicResponse = await jira.fetch(`/rest/api/3/issue/${args.epic_key}?fields=issuetype`); + + if (epicResponse.fields.issuetype.name !== 'Epic') { + throw new Error(`Issue ${args.epic_key} is not an Epic`); + } + + // Use JQL to search for issues in the Epic + const jql = `"Epic Link" = ${args.epic_key} OR parent = ${args.epic_key}`; + const searchParams = new URLSearchParams(); + searchParams.append('jql', jql); + searchParams.append('maxResults', String(args.limit || 10)); + searchParams.append('startAt', String(args.startAt || 0)); + searchParams.append('fields', DEFAULT_READ_JIRA_FIELDS.join(',')); + + const response = await jira.fetch(`/rest/api/3/search?${searchParams.toString()}`); + + return { + content: [ + { + type: "text", + text: JSON.stringify(response), + }, + ], + }; + } + + case "jira_get_sprints_from_board": { + const args = request.params.arguments as unknown as JiraGetSprintsFromBoardArgs; + if (!args.board_id) { + throw new Error("Missing required argument: board_id"); + } + + const searchParams = new URLSearchParams(); + + if (args.state) { + searchParams.append('state', args.state); + } + + searchParams.append('maxResults', String(args.limit || 10)); + searchParams.append('startAt', String(args.startAt || 0)); + + const query = searchParams.toString() ? `?${searchParams.toString()}` : ''; + const response = await jira.fetch(`/rest/agile/1.0/board/${args.board_id}/sprint${query}`); + + return { + content: [ + { + type: "text", + text: JSON.stringify(response), + }, + ], + }; + } + + case "jira_create_sprint": { + const args = request.params.arguments as unknown as JiraCreateSprintArgs; + if (!args.board_id || !args.sprint_name || !args.start_date || !args.end_date) { + throw new Error("Missing required arguments for sprint creation"); + } + + const payload = { + name: args.sprint_name, + startDate: args.start_date, + endDate: args.end_date, + originBoardId: args.board_id, + }; + + if (args.goal) { + Object.assign(payload, { goal: args.goal }); + } + + const response = await jira.fetch('/rest/agile/1.0/sprint', { + method: 'POST', + body: JSON.stringify(payload), + }); + + return { + content: [ + { + type: "text", + text: JSON.stringify(response), + }, + ], + }; + } + + case "jira_get_sprint_issues": { + const args = request.params.arguments as unknown as JiraGetSprintIssuesArgs; + if (!args.sprint_id) { + throw new Error("Missing required argument: sprint_id"); + } + + const searchParams = new URLSearchParams(); + + if (args.fields) { + if (args.fields === "*all") { + searchParams.append('fields', "*all"); + } else { + searchParams.append('fields', args.fields); + } + } else { + searchParams.append('fields', DEFAULT_READ_JIRA_FIELDS.join(',')); + } + + searchParams.append('maxResults', String(args.limit || 10)); + searchParams.append('startAt', String(args.startAt || 0)); + + const query = searchParams.toString() ? `?${searchParams.toString()}` : ''; + const response = await jira.fetch(`/rest/agile/1.0/sprint/${args.sprint_id}/issue${query}`); + + return { + content: [ + { + type: "text", + text: JSON.stringify(response), + }, + ], + }; + } + + case "jira_update_sprint": { + const args = request.params.arguments as unknown as JiraUpdateSprintArgs; + if (!args.sprint_id) { + throw new Error("Missing required argument: sprint_id"); + } + + const payload: any = {}; + + if (args.sprint_name) payload.name = args.sprint_name; + if (args.goal) payload.goal = args.goal; + if (args.start_date) payload.startDate = args.start_date; + if (args.end_date) payload.endDate = args.end_date; + if (args.state) payload.state = args.state; + + if (Object.keys(payload).length === 0) { + throw new Error("At least one field must be provided to update"); + } + + const response = await jira.fetch(`/rest/agile/1.0/sprint/${args.sprint_id}`, { + method: 'PUT', + body: JSON.stringify(payload), + }); + + return { + content: [ + { + type: "text", + text: JSON.stringify(response), + }, + ], + }; + } + + case "jira_create_issue": { + const args = request.params.arguments as unknown as JiraCreateIssueArgs; + if (!args.project_key || !args.issue_type || !args.summary) { + throw new Error("Missing required arguments for issue creation"); + } + + // Construct issue creation payload + const payload: any = { + fields: { + project: { + key: args.project_key, + }, + issuetype: { + name: args.issue_type, + }, + summary: args.summary, + }, + }; + + // Add description if provided + if (args.description) { + payload.fields.description = { + type: "doc", + version: 1, + content: [ + { + type: "paragraph", + content: [ + { + type: "text", + text: args.description, + }, + ], + }, + ], + }; + } + + // Add assignee if provided + if (args.assignee) { + if (args.assignee.toLowerCase() === 'currentuser()') { + try { + const currentUser = await jira.fetch('/rest/api/3/myself'); + if (currentUser && currentUser.accountId) { + payload.fields.assignee = { accountId: currentUser.accountId }; + } else { + throw new Error("Failed to fetch current user's accountId for assignee."); + } + } catch (e) { + throw new Error(`Error resolving currentUser for assignee: ${(e as Error).message}`); + } + } else if (args.assignee.includes('@')) { + payload.fields.assignee = { emailAddress: args.assignee }; + } else if (args.assignee.startsWith('user:') || args.assignee.length > 20) { // Heuristic for accountId + payload.fields.assignee = { accountId: args.assignee }; + } else { + payload.fields.assignee = { name: args.assignee }; + } + } + + // Add components if provided + if (args.components) { + const componentNames = args.components.split(',').map(c => c.trim()); + payload.fields.components = componentNames.map(name => ({ name })); + } + + // Add additional fields if provided + if (args.additional_fields) { + try { + const additionalFields = JSON.parse(args.additional_fields); + Object.entries(additionalFields).forEach(([key, value]) => { + payload.fields[key] = value; + }); + } catch (e) { + throw new Error(`Invalid JSON in additional_fields: ${(e as Error).message}`); + } + } + + const response = await jira.fetch('/rest/api/3/issue', { + method: 'POST', + body: JSON.stringify(payload), + }); + + return { + content: [ + { + type: "text", + text: JSON.stringify(response), + }, + ], + }; + } + + case "jira_update_issue": { + const args = request.params.arguments as unknown as JiraUpdateIssueArgs; + if (!args.issue_key || !args.fields) { + throw new Error("Missing required arguments: issue_key and fields"); + } + + let fieldsObj: any = {}; + try { + fieldsObj = JSON.parse(args.fields); + } catch (e) { + throw new Error(`Invalid JSON in fields: ${(e as Error).message}`); + } + + // Resolve currentUser() for assignee + if (fieldsObj.assignee) { + let assigneeValue = fieldsObj.assignee; + + // Check if assignee is provided as a string "currentUser()" + // or as an object like { "id": "currentUser()" } or { "name": "currentUser()" } + if ( + (typeof assigneeValue === 'string' && assigneeValue.toLowerCase() === 'currentuser()') || + (typeof assigneeValue === 'object' && assigneeValue !== null && + (String(assigneeValue.id).toLowerCase() === 'currentuser()' || String(assigneeValue.name).toLowerCase() === 'currentuser()') + ) + ) { + try { + const currentUser = await jira.fetch('/rest/api/3/myself'); + if (currentUser && currentUser.accountId) { + fieldsObj.assignee = { accountId: currentUser.accountId }; + } else { + throw new Error("Failed to fetch current user's accountId."); + } + } catch (e) { + throw new Error(`Error resolving currentUser for assignee: ${(e as Error).message}`); + } + } + } + + // Construct issue update payload + const payload: any = { + fields: fieldsObj, + }; + + // Add additional fields if provided + if (args.additional_fields) { + try { + const additionalFields = JSON.parse(args.additional_fields); + Object.entries(additionalFields).forEach(([key, value]) => { + payload.fields[key] = value; + }); + } catch (e) { + throw new Error(`Invalid JSON in additional_fields: ${(e as Error).message}`); + } + } + + // Format description if provided + if (payload.fields.description && typeof payload.fields.description === 'string') { + payload.fields.description = { + type: "doc", + version: 1, + content: [ + { + type: "paragraph", + content: [ + { + type: "text", + text: payload.fields.description, + }, + ], + }, + ], + }; + } + + // Only send request if there are fields to update + let responseText = ""; + if (Object.keys(payload.fields).length > 0) { + const response = await jira.fetch(`/rest/api/3/issue/${args.issue_key}`, { + method: 'PUT', + body: JSON.stringify(payload), + }); + + responseText = JSON.stringify(response); + } + + // Get updated issue to return in response + const updatedIssue = await jira.fetch(`/rest/api/3/issue/${args.issue_key}`); + + return { + content: [ + { + type: "text", + text: JSON.stringify(updatedIssue), + }, + ], + }; + } + + case "jira_delete_issue": { + const args = request.params.arguments as unknown as JiraDeleteIssueArgs; + if (!args.issue_key) { + throw new Error("Missing required argument: issue_key"); + } + + const response = await jira.fetch(`/rest/api/3/issue/${args.issue_key}`, { + method: 'DELETE' + }); + + // Note: Jira DELETE issue often returns 204 No Content on success. + // If the response is empty or undefined, return a success message. + // Otherwise, return the actual response. + const responseText = response ? JSON.stringify(response) : JSON.stringify({ message: `Issue ${args.issue_key} deleted successfully (status 204)` }); + + return { + content: [ + { + type: "text", + text: responseText, + }, + ], + }; + } + + case "jira_add_comment": { + const args = request.params.arguments as unknown as JiraAddCommentArgs; + if (!args.issue_key || !args.comment) { + throw new Error("Missing required arguments: issue_key and comment"); + } + + const payload = { + body: { + type: "doc", + version: 1, + content: [ + { + type: "paragraph", + content: [ + { + type: "text", + text: args.comment, + }, + ], + }, + ], + }, + }; + + const response = await jira.fetch(`/rest/api/3/issue/${args.issue_key}/comment`, { + method: 'POST', + body: JSON.stringify(payload), + }); + + return { + content: [ + { + type: "text", + text: JSON.stringify(response), + }, + ], + }; + } + + case "jira_get_link_types": { + const response = await jira.fetch('/rest/api/3/issueLinkType'); + + return { + content: [ + { + type: "text", + text: JSON.stringify(response), + }, + ], + }; + } + + default: + throw new Error(`Unknown tool: ${request.params.name}`); + } + } catch (error) { + console.error("Error executing tool:", error); + + if (error instanceof z.ZodError) { + throw new Error(`Invalid input: ${JSON.stringify(error.errors)}`); + } + + throw error; + } + } + ); + + return server; +}; + +const app = express(); + + +//============================================================================= +// STREAMABLE HTTP TRANSPORT (PROTOCOL VERSION 2025-03-26) +//============================================================================= + +app.post('/mcp', async (req: Request, res: Response) => { + const authToken = extractAccessToken(req); + + const server = getJiraMcpServer(); + try { + const transport: StreamableHTTPServerTransport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + await server.connect(transport); + asyncLocalStorage.run({ authToken }, async () => { + await transport.handleRequest(req, res, req.body); + }); + res.on('close', () => { + console.log('Request closed'); + transport.close(); + server.close(); + }); + } catch (error) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', + }, + id: null, + }); + } + } +}); + +app.get('/mcp', async (req: Request, res: Response) => { + console.log('Received GET MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +app.delete('/mcp', async (req: Request, res: Response) => { + console.log('Received DELETE MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +//============================================================================= +// DEPRECATED HTTP+SSE TRANSPORT (PROTOCOL VERSION 2024-11-05) +//============================================================================= +const transports = new Map(); + +app.get("/sse", async (req, res) => { + const transport = new SSEServerTransport(`/messages`, res); + + // Set up cleanup when connection closes + res.on('close', async () => { + console.log(`SSE connection closed for transport: ${transport.sessionId}`); + transports.delete(transport.sessionId); + }); + + transports.set(transport.sessionId, transport); + + const server = getJiraMcpServer(); + await server.connect(transport); + + console.log(`SSE connection established with transport: ${transport.sessionId}`); +}); + +app.post("/messages", async (req, res) => { + const sessionId = req.query.sessionId as string; + + let transport: SSEServerTransport | undefined; + transport = sessionId ? transports.get(sessionId) : undefined; + if (transport) { + const authToken = extractAccessToken(req); + + asyncLocalStorage.run({ authToken }, async () => { + await transport!.handlePostMessage(req, res); + }); + } else { + console.error(`Transport not found for session ID: ${sessionId}`); + res.status(404).send({ error: "Transport not found" }); + } +}); + +const PORT = process.env.PORT || 5000; +app.listen(PORT, () => { + console.log(`Jira MCP Server running on port ${PORT}`); +}); \ No newline at end of file diff --git a/mcp_servers/jira/package-lock.json b/mcp_servers/jira/package-lock.json new file mode 100644 index 00000000..c95fb1fe --- /dev/null +++ b/mcp_servers/jira/package-lock.json @@ -0,0 +1,1280 @@ +{ + "name": "@klavis-ai/mcp-server-jira", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@klavis-ai/mcp-server-jira", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.12.1", + "dotenv": "^16.3.1", + "express": "^5.1.0", + "node-fetch": "^3.3.2", + "zod": "^3.24.2" + }, + "devDependencies": { + "@types/express": "^5.0.0", + "@types/node": "^22.13.5", + "typescript": "^5.7.3" + } + }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.13.1.tgz", + "integrity": "sha512-8q6+9aF0yA39/qWT/uaIj6zTpC+Qu07DnN/lb9mjoquCJsAh6l3HyYqc9O3t2j7GilseOQOQimLg7W3By6jqvg==", + "license": "MIT", + "dependencies": { + "ajv": "^6.12.6", + "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "express": "^5.0.1", + "express-rate-limit": "^7.5.0", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.23.8", + "zod-to-json-schema": "^3.24.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@types/body-parser": { + "version": "1.19.5", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", + "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/express": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.1.tgz", + "integrity": "sha512-UZUw8vjpWFXuDnjFTh7/5c2TWDlQqeXHi6hcN7F2XSVT5P+WmUnnbFS3KA6Jnc6IsEqI2qCVu2bK0R0J4A8ZQQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^5.0.0", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.6.tgz", + "integrity": "sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", + "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.15.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.3.tgz", + "integrity": "sha512-lX7HFZeHf4QG/J7tBZqrCAXwz9J5RD56Y6MpP0eJkka8p+K0RY/yBTW7CYFJ4VGCclxqOLKmiGP5juQc6MKgcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@types/qs": { + "version": "6.9.18", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.18.tgz", + "integrity": "sha512-kK7dgTYDyGqS+e2Q4aK9X3D7q234CIZ1Bv0q/7Z5IwRDoADNU81xXJK/YVyLbLTZCoIwUoDoffFeF+p/eIklAA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/send": { + "version": "0.17.4", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", + "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.7", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", + "integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/data-uri-to-buffer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, + "node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/dotenv": { + "version": "16.5.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.5.0.tgz", + "integrity": "sha512-m/C+AwOAr9/W1UOIZUo232ejMNnJAJtYQjUbHoNTBNTJSvqzzDh7vnrei3o3r3m9blf6ZoDkvcw0VmozNRFJxg==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventsource": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.6.tgz", + "integrity": "sha512-l19WpE2m9hSuyP06+FbuUUf1G+R0SFLrtQfbRb9PRr+oimOfxQhgGCbVaXg5IvZyyTThJsxh6L/srkMiCeBPDA==", + "license": "MIT", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.1.tgz", + "integrity": "sha512-VARTJ9CYeuQYb0pZEPbzi740OWFgpHe7AYJ2WFZVnUDUQp5Dk2yJUgF36YsZ81cOyxT0QxmXD2EQpapAouzWVA==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/express": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "license": "MIT", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.0", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.0.tgz", + "integrity": "sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg==", + "license": "MIT", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": "^4.11 || 5 || ^5.0.0-beta.1" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, + "node_modules/fetch-blob": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", + "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "paypal", + "url": "https://paypal.me/jimmywarting" + } + ], + "license": "MIT", + "dependencies": { + "node-domexception": "^1.0.0", + "web-streams-polyfill": "^3.0.3" + }, + "engines": { + "node": "^12.20 || >= 14.13" + } + }, + "node_modules/finalhandler": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/formdata-polyfill": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", + "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "license": "MIT", + "dependencies": { + "fetch-blob": "^3.1.2" + }, + "engines": { + "node": ">=12.20.0" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "deprecated": "Use your platform's native DOMException instead", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "license": "MIT", + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "license": "MIT", + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-to-regexp": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "license": "MIT", + "engines": { + "node": ">=16" + } + }, + "node_modules/pkce-challenge": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", + "license": "MIT", + "engines": { + "node": ">=16.20.0" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", + "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.6.3", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + "path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/serve-static": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/web-streams-polyfill": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", + "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, + "node_modules/zod": { + "version": "3.24.3", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.3.tgz", + "integrity": "sha512-HhY1oqzWCQWuUqvBFnsyrtZRhyPeR7SUGv+C4+MsisMuVfSPx8HpwWqH8tRahSlt6M3PiFAcoeFhZAqIXTxoSg==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.24.5", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz", + "integrity": "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.24.1" + } + } + } +} diff --git a/mcp_servers/jira/package.json b/mcp_servers/jira/package.json new file mode 100644 index 00000000..2d4fd87e --- /dev/null +++ b/mcp_servers/jira/package.json @@ -0,0 +1,25 @@ +{ + "name": "@klavis-ai/mcp-server-jira", + "version": "1.0.0", + "main": "index.js", + "keywords": [], + "author": "", + "license": "ISC", + "description": "", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.12.1", + "dotenv": "^16.3.1", + "express": "^5.1.0", + "node-fetch": "^3.3.2", + "zod": "^3.24.2" + }, + "devDependencies": { + "@types/express": "^5.0.0", + "@types/node": "^22.13.5", + "typescript": "^5.7.3" + }, + "type": "module", + "scripts": { + "build": "tsc && node -e \"require('fs').chmodSync('build/index.js', '755')\"" + } +} \ No newline at end of file diff --git a/mcp_servers/jira/tsconfig.json b/mcp_servers/jira/tsconfig.json new file mode 100644 index 00000000..543adcb3 --- /dev/null +++ b/mcp_servers/jira/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "outDir": "./build", + "rootDir": ".", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true + }, + "include": ["./**/*"], + "exclude": ["node_modules"] +} \ No newline at end of file diff --git a/mcp_servers/linear/.env.example b/mcp_servers/linear/.env.example new file mode 100644 index 00000000..0f30888a --- /dev/null +++ b/mcp_servers/linear/.env.example @@ -0,0 +1 @@ +LINEAR_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/linear/Dockerfile b/mcp_servers/linear/Dockerfile new file mode 100644 index 00000000..8c1169e1 --- /dev/null +++ b/mcp_servers/linear/Dockerfile @@ -0,0 +1,23 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/linear/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/linear/server.py . +COPY mcp_servers/linear/tools/ ./tools/ + +COPY mcp_servers/linear/.env.example .env + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/linear/README.md b/mcp_servers/linear/README.md new file mode 100644 index 00000000..416ea5b2 --- /dev/null +++ b/mcp_servers/linear/README.md @@ -0,0 +1,78 @@ +# Linear MCP Server + +A Model Context Protocol (MCP) server for Linear integration. Manage issues, projects, and team workflows using Linear's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Linear with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("LINEAR", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/linear-mcp-server:latest + + +# Run Linear MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/linear-mcp-server:latest + + +# Run Linear MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_linear_api_key_here"}' \ + ghcr.io/klavis-ai/linear-mcp-server:latest +``` + +**OAuth Setup:** Linear requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Issue Management**: Create, read, update, and manage Linear issues +- **Project Operations**: Manage projects and project workflows +- **Team Management**: Handle team assignments and permissions +- **Status Tracking**: Update issue statuses and priorities +- **Label Management**: Apply and manage issue labels and tags + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/linear/requirements.txt b/mcp_servers/linear/requirements.txt new file mode 100644 index 00000000..7eda28a2 --- /dev/null +++ b/mcp_servers/linear/requirements.txt @@ -0,0 +1,10 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +requests +httpx +click +starlette \ No newline at end of file diff --git a/mcp_servers/linear/server.py b/mcp_servers/linear/server.py new file mode 100644 index 00000000..78798e47 --- /dev/null +++ b/mcp_servers/linear/server.py @@ -0,0 +1,887 @@ +import contextlib +import logging +import os +import json +import base64 +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + auth_token_context, + get_teams, + get_issues, get_issue_by_id, create_issue, update_issue, search_issues, + get_projects, create_project, update_project, + get_comments, create_comment, update_comment +) + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +LINEAR_MCP_SERVER_PORT = int(os.getenv("LINEAR_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +@click.command() +@click.option("--port", default=LINEAR_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("linear-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="linear_get_teams", + description="Get all teams in the Linear workspace including workflow states and team members.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations( + **{"category": "LINEAR_TEAM", "readOnlyHint": True} + ), + ), + types.Tool( + name="linear_get_issues", + description="Get issues, optionally filtering by team or timestamps", + inputSchema={ + "type": "object", + "properties": { + "team_id": { + "type": "string", + "description": "Optional team ID to filter issues by team.", + }, + "limit": { + "type": "integer", + "description": "Maximum number of issues to return (default: 10).", + "default": 10, + }, + "filter": { + "type": "object", + "description": "Filter object for issues", + "properties": { + "priority": { + "type": "integer", + "description": "Filter by priority (0=No Priority, 1=Urgent, 2=High, 3=Medium, 4=Low)" + }, + "updatedAt": { + "type": "object", + "description": "Filter by update timestamp for issues.", + "properties": { + "gte": {"type": "string", "description": "Greater than or equal to timestamp (ISO 8601)"}, + "gt": {"type": "string", "description": "Greater than timestamp (ISO 8601)"}, + "lte": {"type": "string", "description": "Less than or equal to timestamp (ISO 8601)"}, + "lt": {"type": "string", "description": "Less than timestamp (ISO 8601)"}, + "eq": {"type": "string", "description": "Equal to timestamp (ISO 8601)"}, + }, + }, + "createdAt": { + "type": "object", + "description": "Filter by creation timestamp for issues.", + "properties": { + "gte": {"type": "string", "description": "Greater than or equal to timestamp (ISO 8601)"}, + "gt": {"type": "string", "description": "Greater than timestamp (ISO 8601)"}, + "lte": {"type": "string", "description": "Less than or equal to timestamp (ISO 8601)"}, + "lt": {"type": "string", "description": "Less than timestamp (ISO 8601)"}, + "eq": {"type": "string", "description": "Equal to timestamp (ISO 8601)"}, + }, + }, + }, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "LINEAR_ISSUE", "readOnlyHint": True} + ), + ), + types.Tool( + name="linear_get_issue_by_id", + description="Get a specific issue by its ID.", + inputSchema={ + "type": "object", + "required": ["issue_id"], + "properties": { + "issue_id": { + "type": "string", + "description": "The ID of the issue to retrieve.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "LINEAR_ISSUE", "readOnlyHint": True} + ), + ), + types.Tool( + name="linear_create_issue", + description="Create a new issue in Linear.", + inputSchema={ + "type": "object", + "required": ["team_id", "title"], + "properties": { + "team_id": { + "type": "string", + "description": "The ID of the team to create the issue in.", + }, + "title": { + "type": "string", + "description": "The title of the issue.", + }, + "description": { + "type": "string", + "description": "The description of the issue in markdown format.", + }, + "assignee_id": { + "type": "string", + "description": "The ID of the user to assign the issue to.", + }, + "priority": { + "type": "integer", + "description": "The priority of the issue (0=None, 1=Urgent, 2=High, 3=Normal, 4=Low).", + }, + "state_id": { + "type": "string", + "description": "The ID of the workflow state to assign the issue to.", + }, + "project_id": { + "type": "string", + "description": "The ID of the project to assign the issue to.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "LINEAR_ISSUE"} + ), + ), + types.Tool( + name="linear_update_issue", + description="Update an existing issue in Linear.", + inputSchema={ + "type": "object", + "required": ["issue_id"], + "properties": { + "issue_id": { + "type": "string", + "description": "The ID of the issue to update.", + }, + "title": { + "type": "string", + "description": "The new title of the issue.", + }, + "description": { + "type": "string", + "description": "The new description of the issue in markdown format.", + }, + "assignee_id": { + "type": "string", + "description": "The ID of the user to assign the issue to.", + }, + "priority": { + "type": "integer", + "description": "The priority of the issue (0=None, 1=Urgent, 2=High, 3=Normal, 4=Low).", + }, + "state_id": { + "type": "string", + "description": "The ID of the workflow state to assign the issue to.", + }, + "project_id": { + "type": "string", + "description": "The ID of the project to assign the issue to.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "LINEAR_ISSUE"} + ), + ), + types.Tool( + name="linear_get_projects", + description="Get projects, optionally filtering by team or timestamps", + inputSchema={ + "type": "object", + "properties": { + "team_id": { + "type": "string", + "description": "Optional team ID to filter projects by team.", + }, + "limit": { + "type": "integer", + "description": "Maximum number of projects to return (default: 50).", + "default": 50, + }, + "filter": { + "type": "object", + "description": "Filter object for projects.", + "properties": { + "updatedAt": { + "type": "object", + "description": "Filter by update timestamp for projects.", + "properties": { + "gte": {"type": "string", "description": "Greater than or equal to timestamp (ISO 8601)"}, + "gt": {"type": "string", "description": "Greater than timestamp (ISO 8601)"}, + "lte": {"type": "string", "description": "Less than or equal to timestamp (ISO 8601)"}, + "lt": {"type": "string", "description": "Less than timestamp (ISO 8601)"}, + "eq": {"type": "string", "description": "Equal to timestamp (ISO 8601)"}, + }, + }, + "createdAt": { + "type": "object", + "description": "Filter by creation timestamp for projects.", + "properties": { + "gte": {"type": "string", "description": "Greater than or equal to timestamp (ISO 8601)"}, + "gt": {"type": "string", "description": "Greater than timestamp (ISO 8601)"}, + "lte": {"type": "string", "description": "Less than or equal to timestamp (ISO 8601)"}, + "lt": {"type": "string", "description": "Less than timestamp (ISO 8601)"}, + "eq": {"type": "string", "description": "Equal to timestamp (ISO 8601)"}, + }, + }, + }, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "LINEAR_PROJECT", "readOnlyHint": True} + ), + ), + types.Tool( + name="linear_create_project", + description="Create a new project in Linear.", + inputSchema={ + "type": "object", + "required": ["name"], + "properties": { + "name": { + "type": "string", + "description": "The name of the project.", + }, + "description": { + "type": "string", + "description": "The description of the project.", + }, + "team_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "Array of team IDs to associate with the project.", + }, + "lead_id": { + "type": "string", + "description": "The ID of the user to set as project lead.", + }, + "target_date": { + "type": "string", + "description": "The target completion date for the project (ISO date string).", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "LINEAR_PROJECT"} + ), + ), + types.Tool( + name="linear_update_project", + description="Update an existing project in Linear.", + inputSchema={ + "type": "object", + "required": ["project_id"], + "properties": { + "project_id": { + "type": "string", + "description": "The ID of the project to update.", + }, + "name": { + "type": "string", + "description": "The new name of the project.", + }, + "description": { + "type": "string", + "description": "The new description of the project.", + }, + "state": { + "type": "string", + "description": "The new state of the project (planned, started, completed, canceled).", + }, + "target_date": { + "type": "string", + "description": "The new target completion date (ISO date string).", + }, + "lead_id": { + "type": "string", + "description": "The ID of the user to set as project lead.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "LINEAR_PROJECT"} + ), + ), + types.Tool( + name="linear_get_comments", + description="Get comments for a specific issue.", + inputSchema={ + "type": "object", + "required": ["issue_id"], + "properties": { + "issue_id": { + "type": "string", + "description": "The ID of the issue to get comments for.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "LINEAR_COMMENT", "readOnlyHint": True} + ), + ), + types.Tool( + name="linear_create_comment", + description="Create a comment on an issue.", + inputSchema={ + "type": "object", + "required": ["issue_id", "body"], + "properties": { + "issue_id": { + "type": "string", + "description": "The ID of the issue to comment on.", + }, + "body": { + "type": "string", + "description": "The content of the comment in markdown format.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "LINEAR_COMMENT"} + ), + ), + types.Tool( + name="linear_update_comment", + description="Update an existing comment.", + inputSchema={ + "type": "object", + "required": ["comment_id", "body"], + "properties": { + "comment_id": { + "type": "string", + "description": "The ID of the comment to update.", + }, + "body": { + "type": "string", + "description": "The new content of the comment in markdown format.", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "LINEAR_COMMENT"} + ), + ), + types.Tool( + name="linear_search_issues", + description="Search for issues by text query.", + inputSchema={ + "type": "object", + "required": ["query"], + "properties": { + "query": { + "type": "string", + "description": "The text to search for in issue titles.", + }, + "team_id": { + "type": "string", + "description": "Optional team ID to limit search to specific team.", + }, + "limit": { + "type": "integer", + "description": "Maximum number of results to return (default: 20).", + "default": 20, + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "LINEAR_ISSUE", "readOnlyHint": True} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + if name == "linear_get_teams": + try: + result = await get_teams() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linear_get_issues": + team_id = arguments.get("team_id") + limit = arguments.get("limit", 10) + filter_param = arguments.get("filter") + try: + result = await get_issues(team_id, limit, filter_param) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linear_get_issue_by_id": + issue_id = arguments.get("issue_id") + if not issue_id: + return [ + types.TextContent( + type="text", + text="Error: issue_id parameter is required", + ) + ] + try: + result = await get_issue_by_id(issue_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linear_create_issue": + team_id = arguments.get("team_id") + title = arguments.get("title") + if not team_id or not title: + return [ + types.TextContent( + type="text", + text="Error: team_id and title parameters are required", + ) + ] + + description = arguments.get("description") + assignee_id = arguments.get("assignee_id") + priority = arguments.get("priority") + state_id = arguments.get("state_id") + project_id = arguments.get("project_id") + + try: + result = await create_issue(team_id, title, description, assignee_id, priority, state_id, project_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linear_update_issue": + issue_id = arguments.get("issue_id") + if not issue_id: + return [ + types.TextContent( + type="text", + text="Error: issue_id parameter is required", + ) + ] + + title = arguments.get("title") + description = arguments.get("description") + assignee_id = arguments.get("assignee_id") + priority = arguments.get("priority") + state_id = arguments.get("state_id") + project_id = arguments.get("project_id") + + try: + result = await update_issue(issue_id, title, description, assignee_id, priority, state_id, project_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linear_get_projects": + team_id = arguments.get("team_id") + limit = arguments.get("limit", 50) + filter_param = arguments.get("filter") + try: + result = await get_projects(team_id, limit, filter_param) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linear_create_project": + name = arguments.get("name") + if not name: + return [ + types.TextContent( + type="text", + text="Error: name parameter is required", + ) + ] + + description = arguments.get("description") + team_ids = arguments.get("team_ids") + lead_id = arguments.get("lead_id") + target_date = arguments.get("target_date") + + try: + result = await create_project(name, description, team_ids, lead_id, target_date) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linear_update_project": + project_id = arguments.get("project_id") + if not project_id: + return [ + types.TextContent( + type="text", + text="Error: project_id parameter is required", + ) + ] + + name = arguments.get("name") + description = arguments.get("description") + state = arguments.get("state") + target_date = arguments.get("target_date") + lead_id = arguments.get("lead_id") + + try: + result = await update_project(project_id, name, description, state, target_date, lead_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linear_get_comments": + issue_id = arguments.get("issue_id") + if not issue_id: + return [ + types.TextContent( + type="text", + text="Error: issue_id parameter is required", + ) + ] + try: + result = await get_comments(issue_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linear_create_comment": + issue_id = arguments.get("issue_id") + body = arguments.get("body") + if not issue_id or not body: + return [ + types.TextContent( + type="text", + text="Error: issue_id and body parameters are required", + ) + ] + try: + result = await create_comment(issue_id, body) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linear_update_comment": + comment_id = arguments.get("comment_id") + body = arguments.get("body") + if not comment_id or not body: + return [ + types.TextContent( + type="text", + text="Error: comment_id and body parameters are required", + ) + ] + try: + result = await update_comment(comment_id, body) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linear_search_issues": + query_text = arguments.get("query") + if not query_text: + return [ + types.TextContent( + type="text", + text="Error: query parameter is required", + ) + ] + + team_id = arguments.get("team_id") + limit = arguments.get("limit", 20) + + try: + result = await search_issues(query_text, team_id, limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/linear/tools/__init__.py b/mcp_servers/linear/tools/__init__.py new file mode 100644 index 00000000..ae680384 --- /dev/null +++ b/mcp_servers/linear/tools/__init__.py @@ -0,0 +1,33 @@ +# Linear MCP Server Tools +# This package contains all the tool implementations organized by object type + +from .teams import get_teams +from .issues import get_issues, get_issue_by_id, create_issue, update_issue, search_issues +from .projects import get_projects, create_project, update_project +from .comments import get_comments, create_comment, update_comment +from .base import auth_token_context + +__all__ = [ + # Teams + "get_teams", + + # Issues + "get_issues", + "get_issue_by_id", + "create_issue", + "update_issue", + "search_issues", + + # Projects + "get_projects", + "create_project", + "update_project", + + # Comments + "get_comments", + "create_comment", + "update_comment", + + # Base + "auth_token_context", +] \ No newline at end of file diff --git a/mcp_servers/linear/tools/base.py b/mcp_servers/linear/tools/base.py new file mode 100644 index 00000000..4cf581ab --- /dev/null +++ b/mcp_servers/linear/tools/base.py @@ -0,0 +1,37 @@ +import logging +from typing import Any, Dict +from contextvars import ContextVar +import httpx + +# Configure logging +logger = logging.getLogger(__name__) + +LINEAR_API_ENDPOINT = "https://api.linear.app/graphql" + +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("Authentication token not found in request context") + +async def make_graphql_request(query: str, variables: Dict[str, Any] = None) -> Dict[str, Any]: + """Make a GraphQL request to Linear API.""" + access_token = get_auth_token() + + headers = { + "Authorization": access_token, + "Content-Type": "application/json" + } + + payload = {"query": query} + if variables: + payload["variables"] = variables + + async with httpx.AsyncClient() as client: + response = await client.post(LINEAR_API_ENDPOINT, json=payload, headers=headers) + response.raise_for_status() + return response.json() \ No newline at end of file diff --git a/mcp_servers/linear/tools/comments.py b/mcp_servers/linear/tools/comments.py new file mode 100644 index 00000000..0551c295 --- /dev/null +++ b/mcp_servers/linear/tools/comments.py @@ -0,0 +1,113 @@ +import logging +from typing import Any, Dict +from .base import make_graphql_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_comments(issue_id: str) -> Dict[str, Any]: + """Get comments for a specific issue.""" + logger.info(f"Executing tool: get_comments with issue_id: {issue_id}") + try: + query = """ + query IssueComments($id: String!) { + issue(id: $id) { + id + identifier + title + comments { + nodes { + id + body + user { + id + name + email + } + createdAt + updatedAt + url + } + } + } + } + """ + variables = {"id": issue_id} + return await make_graphql_request(query, variables) + except Exception as e: + logger.exception(f"Error executing tool get_comments: {e}") + raise e + +async def create_comment(issue_id: str, body: str) -> Dict[str, Any]: + """Create a comment on an issue.""" + logger.info(f"Executing tool: create_comment on issue: {issue_id}") + try: + query = """ + mutation CommentCreate($input: CommentCreateInput!) { + commentCreate(input: $input) { + success + comment { + id + body + user { + id + name + email + } + issue { + id + identifier + title + } + createdAt + url + } + } + } + """ + + input_data = { + "issueId": issue_id, + "body": body + } + + variables = {"input": input_data} + return await make_graphql_request(query, variables) + except Exception as e: + logger.exception(f"Error executing tool create_comment: {e}") + raise e + +async def update_comment(comment_id: str, body: str) -> Dict[str, Any]: + """Update an existing comment.""" + logger.info(f"Executing tool: update_comment with comment_id: {comment_id}") + try: + query = """ + mutation CommentUpdate($id: String!, $input: CommentUpdateInput!) { + commentUpdate(id: $id, input: $input) { + success + comment { + id + body + user { + id + name + email + } + issue { + id + identifier + title + } + updatedAt + url + } + } + } + """ + + input_data = {"body": body} + variables = {"id": comment_id, "input": input_data} + return await make_graphql_request(query, variables) + except Exception as e: + logger.exception(f"Error executing tool update_comment: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/linear/tools/issues.py b/mcp_servers/linear/tools/issues.py new file mode 100644 index 00000000..403e8c83 --- /dev/null +++ b/mcp_servers/linear/tools/issues.py @@ -0,0 +1,398 @@ +import logging +from typing import Any, Dict +from .base import make_graphql_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_issues(team_id: str = None, limit: int = 50, filter: Dict[str, Any] = None) -> Dict[str, Any]: + """Get issues with optional filtering by team and timestamps.""" + logger.info(f"Executing tool: get_issues with team_id: {team_id}, limit: {limit}, filter: {filter}") + try: + # Build the filter object + issue_filter = {} + + # Add team filter if specified + if team_id: + issue_filter["team"] = {"id": {"eq": team_id}} + + # Add timestamp filters if provided + if filter: + if "updatedAt" in filter: + issue_filter["updatedAt"] = filter["updatedAt"] + if "createdAt" in filter: + issue_filter["createdAt"] = filter["createdAt"] + if "priority" in filter: + issue_filter["priority"] = {"eq": filter["priority"]} + + # Use filtered query if we have any filters + if issue_filter: + query = """ + query FilteredIssues($filter: IssueFilter, $first: Int) { + issues(filter: $filter, first: $first) { + nodes { + id + identifier + title + description + priority + state { + id + name + type + } + assignee { + id + name + email + } + creator { + id + name + email + } + team { + id + name + key + } + project { + id + name + } + createdAt + updatedAt + url + } + } + } + """ + variables = {"filter": issue_filter, "first": limit} + else: + # No filters, use simple query + query = """ + query Issues($first: Int) { + issues(first: $first) { + nodes { + id + identifier + title + description + priority + state { + id + name + type + } + assignee { + id + name + email + } + creator { + id + name + email + } + team { + id + name + key + } + project { + id + name + } + createdAt + updatedAt + url + } + } + } + """ + variables = {"first": limit} + + return await make_graphql_request(query, variables) + except Exception as e: + logger.exception(f"Error executing tool get_issues: {e}") + raise e + +async def get_issue_by_id(issue_id: str) -> Dict[str, Any]: + """Get a specific issue by ID.""" + logger.info(f"Executing tool: get_issue_by_id with issue_id: {issue_id}") + try: + query = """ + query Issue($id: String!) { + issue(id: $id) { + id + identifier + title + description + priority + priorityLabel + state { + id + name + type + } + assignee { + id + name + email + } + creator { + id + name + email + } + team { + id + name + key + } + project { + id + name + } + comments { + nodes { + id + body + user { + id + name + } + createdAt + updatedAt + } + } + createdAt + updatedAt + url + } + } + """ + variables = {"id": issue_id} + return await make_graphql_request(query, variables) + except Exception as e: + logger.exception(f"Error executing tool get_issue_by_id: {e}") + raise e + +async def create_issue(team_id: str, title: str, description: str = None, assignee_id: str = None, priority: int = None, state_id: str = None, project_id: str = None) -> Dict[str, Any]: + """Create a new issue.""" + logger.info(f"Executing tool: create_issue with title: {title}") + try: + query = """ + mutation IssueCreate($input: IssueCreateInput!) { + issueCreate(input: $input) { + success + issue { + id + identifier + title + description + priority + priorityLabel + state { + id + name + type + } + assignee { + id + name + email + } + team { + id + name + key + } + project { + id + name + } + createdAt + url + } + } + } + """ + + input_data = { + "teamId": team_id, + "title": title + } + + if description: + input_data["description"] = description + if assignee_id: + input_data["assigneeId"] = assignee_id + if priority is not None: + input_data["priority"] = priority + if state_id: + input_data["stateId"] = state_id + if project_id: + input_data["projectId"] = project_id + + variables = {"input": input_data} + return await make_graphql_request(query, variables) + except Exception as e: + logger.exception(f"Error executing tool create_issue: {e}") + raise e + +async def update_issue(issue_id: str, title: str = None, description: str = None, assignee_id: str = None, priority: int = None, state_id: str = None, project_id: str = None) -> Dict[str, Any]: + """Update an existing issue.""" + logger.info(f"Executing tool: update_issue with issue_id: {issue_id}") + try: + query = """ + mutation IssueUpdate($id: String!, $input: IssueUpdateInput!) { + issueUpdate(id: $id, input: $input) { + success + issue { + id + identifier + title + description + priority + priorityLabel + state { + id + name + type + } + assignee { + id + name + email + } + team { + id + name + key + } + project { + id + name + } + updatedAt + url + } + } + } + """ + + input_data = {} + if title: + input_data["title"] = title + if description is not None: + input_data["description"] = description + if assignee_id: + input_data["assigneeId"] = assignee_id + if priority is not None: + input_data["priority"] = priority + if state_id: + input_data["stateId"] = state_id + if project_id: + input_data["projectId"] = project_id + + variables = {"id": issue_id, "input": input_data} + return await make_graphql_request(query, variables) + except Exception as e: + logger.exception(f"Error executing tool update_issue: {e}") + raise e + +async def search_issues(query_text: str, team_id: str = None, limit: int = 20) -> Dict[str, Any]: + """Search for issues by text.""" + logger.info(f"Executing tool: search_issues with query: {query_text}") + try: + if team_id: + query = """ + query SearchIssues($filter: IssueFilter, $first: Int) { + issues(filter: $filter, first: $first) { + nodes { + id + identifier + title + description + priority + state { + id + name + type + } + assignee { + id + name + email + } + team { + id + name + key + } + project { + id + name + } + createdAt + updatedAt + url + } + } + } + """ + variables = { + "filter": { + "team": {"id": {"eq": team_id}}, + "title": {"containsIgnoreCase": query_text} + }, + "first": limit + } + else: + query = """ + query SearchIssues($filter: IssueFilter, $first: Int) { + issues(filter: $filter, first: $first) { + nodes { + id + identifier + title + description + priority + state { + id + name + type + } + assignee { + id + name + email + } + team { + id + name + key + } + project { + id + name + } + createdAt + updatedAt + url + } + } + } + """ + variables = { + "filter": { + "title": {"containsIgnoreCase": query_text} + }, + "first": limit + } + + return await make_graphql_request(query, variables) + except Exception as e: + logger.exception(f"Error executing tool search_issues: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/linear/tools/projects.py b/mcp_servers/linear/tools/projects.py new file mode 100644 index 00000000..42bd0c81 --- /dev/null +++ b/mcp_servers/linear/tools/projects.py @@ -0,0 +1,212 @@ +import logging +from typing import Any, Dict +from .base import make_graphql_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_projects(team_id: str = None, limit: int = 50, filter: Dict[str, Any] = None) -> Dict[str, Any]: + """Get projects with optional filtering by team and timestamps.""" + logger.info(f"Executing tool: get_projects with team_id: {team_id}, limit: {limit}, filter: {filter}") + try: + # Build the filter object + project_filter = {} + + # Add team filter if specified via team_id parameter (for backward compatibility) + if team_id: + # For projects, we need to filter by teams relation + project_filter["teams"] = {"some": {"id": {"eq": team_id}}} + + # Add timestamp filters if provided + if filter: + if "updatedAt" in filter: + project_filter["updatedAt"] = filter["updatedAt"] + if "createdAt" in filter: + project_filter["createdAt"] = filter["createdAt"] + + # Use filtered query if we have any filters + if project_filter: + query = """ + query FilteredProjects($filter: ProjectFilter, $first: Int) { + projects(filter: $filter, first: $first) { + nodes { + id + name + description + state + progress + targetDate + lead { + id + name + email + } + members { + nodes { + id + name + email + } + } + teams { + nodes { + id + name + key + } + } + createdAt + updatedAt + url + } + } + } + """ + variables = {"filter": project_filter, "first": limit} + else: + # No filters, use simple query + query = """ + query Projects($first: Int) { + projects(first: $first) { + nodes { + id + name + description + state + progress + targetDate + lead { + id + name + email + } + members { + nodes { + id + name + email + } + } + teams { + nodes { + id + name + key + } + } + createdAt + updatedAt + url + } + } + } + """ + variables = {"first": limit} + + return await make_graphql_request(query, variables) + except Exception as e: + logger.exception(f"Error executing tool get_projects: {e}") + raise e + +async def create_project(name: str, description: str = None, team_ids: list = None, lead_id: str = None, target_date: str = None) -> Dict[str, Any]: + """Create a new project.""" + logger.info(f"Executing tool: create_project with name: {name}") + try: + query = """ + mutation ProjectCreate($input: ProjectCreateInput!) { + projectCreate(input: $input) { + success + project { + id + name + description + state + progress + targetDate + lead { + id + name + email + } + teams { + nodes { + id + name + key + } + } + createdAt + url + } + } + } + """ + + input_data = {"name": name} + + if description: + input_data["description"] = description + if team_ids: + input_data["teamIds"] = team_ids + if lead_id: + input_data["leadId"] = lead_id + if target_date: + input_data["targetDate"] = target_date + + variables = {"input": input_data} + return await make_graphql_request(query, variables) + except Exception as e: + logger.exception(f"Error executing tool create_project: {e}") + raise e + +async def update_project(project_id: str, name: str = None, description: str = None, state: str = None, target_date: str = None, lead_id: str = None) -> Dict[str, Any]: + """Update an existing project.""" + logger.info(f"Executing tool: update_project with project_id: {project_id}") + try: + query = """ + mutation ProjectUpdate($id: String!, $input: ProjectUpdateInput!) { + projectUpdate(id: $id, input: $input) { + success + project { + id + name + description + state + progress + targetDate + lead { + id + name + email + } + teams { + nodes { + id + name + key + } + } + updatedAt + url + } + } + } + """ + + input_data = {} + if name: + input_data["name"] = name + if description is not None: + input_data["description"] = description + if state: + input_data["state"] = state + if target_date: + input_data["targetDate"] = target_date + if lead_id: + input_data["leadId"] = lead_id + + variables = {"id": project_id, "input": input_data} + return await make_graphql_request(query, variables) + except Exception as e: + logger.exception(f"Error executing tool update_project: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/linear/tools/teams.py b/mcp_servers/linear/tools/teams.py new file mode 100644 index 00000000..7f15bf70 --- /dev/null +++ b/mcp_servers/linear/tools/teams.py @@ -0,0 +1,46 @@ +import logging +from typing import Any, Dict +from .base import make_graphql_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_teams() -> Dict[str, Any]: + """Get all teams in the Linear workspace including workflow states and team members.""" + logger.info("Executing tool: get_teams") + try: + query = """ + query Teams { + teams { + nodes { + id + name + key + description + private + createdAt + updatedAt + states { + nodes { + id + name + type + color + } + } + members { + nodes { + id + name + displayName + email + } + } + } + } + } + """ + return await make_graphql_request(query) + except Exception as e: + logger.exception(f"Error executing tool get_teams: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/linkedin/.env.example b/mcp_servers/linkedin/.env.example new file mode 100644 index 00000000..2b0d0158 --- /dev/null +++ b/mcp_servers/linkedin/.env.example @@ -0,0 +1,2 @@ +LINKEDIN_ACCESS_TOKEN=YOUR_LINKEDIN_ACCESS_TOKEN_HERE +LINKEDIN_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/linkedin/Dockerfile b/mcp_servers/linkedin/Dockerfile new file mode 100644 index 00000000..47c9843c --- /dev/null +++ b/mcp_servers/linkedin/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +COPY mcp_servers/linkedin/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/linkedin/server.py . +COPY mcp_servers/linkedin/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/linkedin/README.md b/mcp_servers/linkedin/README.md new file mode 100644 index 00000000..113ebf47 --- /dev/null +++ b/mcp_servers/linkedin/README.md @@ -0,0 +1,78 @@ +# LinkedIn MCP Server + +A Model Context Protocol (MCP) server for LinkedIn integration. Manage professional profiles, posts, and networking using LinkedIn's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to LinkedIn with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("LINKEDIN", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/linkedin-mcp-server:latest + + +# Run LinkedIn MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/linkedin-mcp-server:latest + + +# Run LinkedIn MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_linkedin_access_token_here"}' \ + ghcr.io/klavis-ai/linkedin-mcp-server:latest +``` + +**OAuth Setup:** LinkedIn requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Profile Management**: Access and update professional profiles +- **Post Operations**: Create, read, and manage LinkedIn posts +- **Connection Management**: Manage professional connections +- **Company Pages**: Handle company page content and updates +- **Analytics**: Access post and profile analytics + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/linkedin/requirements.txt b/mcp_servers/linkedin/requirements.txt new file mode 100644 index 00000000..70531a2c --- /dev/null +++ b/mcp_servers/linkedin/requirements.txt @@ -0,0 +1,10 @@ +mcp==1.11.0 +fastapi +uvicorn[standard] +click>=8.0.0 +pydantic>=2.5.0 +aiohttp>=3.8.0 +httpx>=0.27.0 +python-dotenv>=1.0.0 +typing-extensions +starlette>=0.27.0 \ No newline at end of file diff --git a/mcp_servers/linkedin/server.py b/mcp_servers/linkedin/server.py new file mode 100644 index 00000000..eb758111 --- /dev/null +++ b/mcp_servers/linkedin/server.py @@ -0,0 +1,443 @@ +import os +import logging +import contextlib +import json +import base64 +from collections.abc import AsyncIterator +from typing import Any, Dict, List, Optional + +import click +from dotenv import load_dotenv +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +from tools import ( + linkedin_token_context, + get_profile_info, + create_post, + format_rich_post, + create_url_share, +) + +load_dotenv() + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("linkedin-mcp-server") + +LINKEDIN_ACCESS_TOKEN = os.getenv("LINKEDIN_ACCESS_TOKEN") or "" # for local use +LINKEDIN_MCP_SERVER_PORT = int(os.getenv("LINKEDIN_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +@click.command() +@click.option("--port", default=LINKEDIN_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("linkedin-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="linkedin_get_profile_info", + description="Get LinkedIn profile information. If person_id is not provided, gets current user's profile.", + inputSchema={ + "type": "object", + "properties": { + "person_id": { + "type": "string", + "description": "The LinkedIn person ID to retrieve information for. Leave empty for current user." + } + } + }, + annotations=types.ToolAnnotations(**{"category": "LINKEDIN_PROFILE", "readOnlyHint": True}) + ), + types.Tool( + name="linkedin_create_post", + description="Create a post on LinkedIn with optional title for article-style posts.", + inputSchema={ + "type": "object", + "required": ["text"], + "properties": { + "text": { + "type": "string", + "description": "The text content of the post." + }, + "title": { + "type": "string", + "description": "Optional title for article-style posts. When provided, creates an article format." + }, + "hashtags": { + "type": "array", + "items": {"type": "string"}, + "description": "Optional list of hashtags to add to the post (# will be added automatically)." + }, + "visibility": { + "type": "string", + "description": "Post visibility (PUBLIC, CONNECTIONS, LOGGED_IN_USERS).", + "default": "PUBLIC" + } + } + }, + annotations=types.ToolAnnotations(**{"category": "LINKEDIN_POST"}) + ), + types.Tool( + name="linkedin_format_rich_post", + description="Format rich text for LinkedIn posts with bold, italic, lists, mentions, and hashtags (utility function - doesn't post).", + inputSchema={ + "type": "object", + "required": ["text"], + "properties": { + "text": { + "type": "string", + "description": "The base text content to format." + }, + "bold_text": { + "type": "array", + "items": {"type": "string"}, + "description": "Text phrases to make bold (will be wrapped with **)." + }, + "italic_text": { + "type": "array", + "items": {"type": "string"}, + "description": "Text phrases to make italic (will be wrapped with *)." + }, + "bullet_points": { + "type": "array", + "items": {"type": "string"}, + "description": "List of bullet points to add." + }, + "numbered_list": { + "type": "array", + "items": {"type": "string"}, + "description": "List of numbered items to add." + }, + "hashtags": { + "type": "array", + "items": {"type": "string"}, + "description": "List of hashtags to add." + }, + "mentions": { + "type": "array", + "items": {"type": "string"}, + "description": "List of usernames to mention (@ will be added automatically)." + } + } + }, + annotations=types.ToolAnnotations(**{"category": "LINKEDIN_POST"}) + ), + types.Tool( + name="linkedin_create_url_share", + description="Share URLs with metadata preview on LinkedIn.", + inputSchema={ + "type": "object", + "required": ["url", "text"], + "properties": { + "url": { + "type": "string", + "description": "The URL to share (must be a valid URL)." + }, + "text": { + "type": "string", + "description": "Commentary text to accompany the shared URL." + }, + "title": { + "type": "string", + "description": "Optional title for the shared URL content." + }, + "description": { + "type": "string", + "description": "Optional description for the shared URL content." + }, + "visibility": { + "type": "string", + "description": "Post visibility (PUBLIC, CONNECTIONS, LOGGED_IN_USERS).", + "default": "PUBLIC" + } + } + }, + annotations=types.ToolAnnotations(**{"category": "LINKEDIN_POST"}) + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + if name == "linkedin_create_post": + text = arguments.get("text") + title = arguments.get("title") + hashtags = arguments.get("hashtags") + visibility = arguments.get("visibility", "PUBLIC") + if not text: + return [ + types.TextContent( + type="text", + text="Error: text parameter is required", + ) + ] + try: + result = await create_post(text, title, visibility, hashtags) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linkedin_format_rich_post": + text = arguments.get("text") + bold_text = arguments.get("bold_text") + italic_text = arguments.get("italic_text") + bullet_points = arguments.get("bullet_points") + numbered_list = arguments.get("numbered_list") + hashtags = arguments.get("hashtags") + mentions = arguments.get("mentions") + + if not text: + return [ + types.TextContent( + type="text", + text="Error: text parameter is required", + ) + ] + try: + result = format_rich_post( + text=text, + bold_text=bold_text, + italic_text=italic_text, + bullet_points=bullet_points, + numbered_list=numbered_list, + hashtags=hashtags, + mentions=mentions + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linkedin_create_url_share": + url = arguments.get("url") + text = arguments.get("text") + title = arguments.get("title") + description = arguments.get("description") + visibility = arguments.get("visibility", "PUBLIC") + + if not url: + return [ + types.TextContent( + type="text", + text="Error: url parameter is required", + ) + ] + if not text: + return [ + types.TextContent( + type="text", + text="Error: text parameter is required", + ) + ] + try: + result = await create_url_share(url, text, title, description, visibility) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "linkedin_get_profile_info": + person_id = arguments.get("person_id") + try: + result = await get_profile_info(person_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + else: + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + if not auth_token: + auth_token = LINKEDIN_ACCESS_TOKEN # Fallback to environment + + # Set the LinkedIn token in context for this request + token = linkedin_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + linkedin_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + if not auth_token: + auth_token = LINKEDIN_ACCESS_TOKEN # Fallback to environment + + # Set the LinkedIn token in context for this request + token = linkedin_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + linkedin_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() diff --git a/mcp_servers/linkedin/tools/__init__.py b/mcp_servers/linkedin/tools/__init__.py new file mode 100644 index 00000000..571e260a --- /dev/null +++ b/mcp_servers/linkedin/tools/__init__.py @@ -0,0 +1,16 @@ +from .auth import get_profile_info +from .posts import create_post, format_rich_post, create_url_share +from .base import linkedin_token_context + +__all__ = [ + # Auth/Profile + "get_profile_info", + + # Posts + "create_post", + "create_url_share", + "format_rich_post", + + # Base + "linkedin_token_context", +] diff --git a/mcp_servers/linkedin/tools/auth.py b/mcp_servers/linkedin/tools/auth.py new file mode 100644 index 00000000..a355af02 --- /dev/null +++ b/mcp_servers/linkedin/tools/auth.py @@ -0,0 +1,35 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_linkedin_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_profile_info(person_id: Optional[str] = None) -> Dict[str, Any]: + """Get LinkedIn profile information. If person_id is None, gets current user's profile.""" + logger.info(f"Executing tool: get_profile_info with person_id: {person_id}") + try: + if person_id: + # Note: Getting other users' profile info requires additional permissions + return {"error": "Getting other users' profile information requires elevated LinkedIn API permissions"} + else: + # Use the working userinfo endpoint for current user + endpoint = "/userinfo" + + # Get basic profile info + profile_data = await make_linkedin_request("GET", endpoint) + + profile_info = { + "id": profile_data.get("sub"), + "firstName": profile_data.get("given_name"), + "lastName": profile_data.get("family_name"), + "name": profile_data.get("name"), + "email": profile_data.get("email"), + "email_verified": profile_data.get("email_verified"), + "locale": profile_data.get("locale") + } + + return profile_info + except Exception as e: + logger.exception(f"Error executing tool get_profile_info: {e}") + raise e diff --git a/mcp_servers/linkedin/tools/base.py b/mcp_servers/linkedin/tools/base.py new file mode 100644 index 00000000..9f31be3b --- /dev/null +++ b/mcp_servers/linkedin/tools/base.py @@ -0,0 +1,97 @@ +import os +import logging +import ssl +from typing import Any, Dict, Optional +from contextvars import ContextVar +import aiohttp + +# Configure logging +logger = logging.getLogger(__name__) + +# LinkedIn API constants +LINKEDIN_API_BASE = "https://api.linkedin.com/v2" + +# Context variable to store the LinkedIn access token for each request +linkedin_token_context: ContextVar[str] = ContextVar('linkedin_token') + +def get_linkedin_access_token() -> str: + """Get the LinkedIn access token from context or environment.""" + try: + # Try to get from context first (for MCP server usage) + return linkedin_token_context.get() + except LookupError: + # Fall back to environment variable (for standalone usage) + token = os.getenv("LINKEDIN_ACCESS_TOKEN") + if not token: + raise RuntimeError("LinkedIn access token not found in request context or environment") + return token + +def _get_linkedin_headers() -> Dict[str, str]: + """Create standard headers for LinkedIn API calls.""" + access_token = get_linkedin_access_token() + return { + "Authorization": f"Bearer {access_token}", + "Content-Type": "application/json", + "X-Restli-Protocol-Version": "2.0.0" + } + +def _get_ssl_context(): + """Create secure SSL context.""" + return ssl.create_default_context() + +async def make_linkedin_request( + method: str, + endpoint: str, + json_data: Optional[Dict] = None, + expect_empty_response: bool = False +) -> Any: + """ + Makes an HTTP request to the LinkedIn API. + + Args: + method: HTTP method (GET, POST, etc.) + endpoint: API endpoint (should start with /) + json_data: JSON payload for POST/PUT requests + expect_empty_response: Whether to expect an empty response (for some operations) + + Returns: + Response data as dict, or None for empty responses + """ + url = f"{LINKEDIN_API_BASE}{endpoint}" + headers = _get_linkedin_headers() + + connector = aiohttp.TCPConnector(ssl=_get_ssl_context()) + async with aiohttp.ClientSession(headers=headers, connector=connector) as session: + try: + async with session.request(method, url, json=json_data) as response: + response.raise_for_status() + + if expect_empty_response: + if response.status in [200, 201, 204]: + return None + else: + logger.warning(f"Expected empty response for {method} {endpoint}, but got status {response.status}") + try: + return await response.json() + except aiohttp.ContentTypeError: + return await response.text() + else: + if 'application/json' in response.headers.get('Content-Type', ''): + return await response.json() + else: + text_content = await response.text() + logger.warning(f"Received non-JSON response for {method} {endpoint}: {text_content[:100]}...") + return {"raw_content": text_content} + + except aiohttp.ClientResponseError as e: + logger.error(f"LinkedIn API request failed: {e.status} {e.message} for {method} {url}") + error_details = e.message + try: + error_body = await e.response.json() + error_details = f"{e.message} - {error_body}" + except Exception: + pass + raise RuntimeError(f"LinkedIn API Error ({e.status}): {error_details}") from e + except Exception as e: + logger.error(f"An unexpected error occurred during LinkedIn API request: {e}") + raise RuntimeError(f"Unexpected error during API call to {method} {url}") from e diff --git a/mcp_servers/linkedin/tools/posts.py b/mcp_servers/linkedin/tools/posts.py new file mode 100644 index 00000000..659920ba --- /dev/null +++ b/mcp_servers/linkedin/tools/posts.py @@ -0,0 +1,233 @@ +import logging +import urllib.parse +from typing import Any, Dict, List, Optional +from .base import make_linkedin_request + +# Configure logging +logger = logging.getLogger(__name__) + +def _format_hashtags(hashtags: List[str]) -> str: + """Format hashtags for LinkedIn posts""" + formatted = [] + for tag in hashtags: + # Remove # if already present and add it back + clean_tag = tag.strip().lstrip('#') + if clean_tag: # Only add non-empty tags + formatted.append(f"#{clean_tag}") + return " ".join(formatted) + +def format_rich_post( + text: str, + bold_text: Optional[List[str]] = None, + italic_text: Optional[List[str]] = None, + bullet_points: Optional[List[str]] = None, + numbered_list: Optional[List[str]] = None, + hashtags: Optional[List[str]] = None, + mentions: Optional[List[str]] = None +) -> Dict[str, Any]: + """ + Format rich text for LinkedIn posts with various formatting options. + This is a utility function that doesn't make API calls. + """ + logger.info("Executing tool: format_rich_post") + try: + formatted_text = text + + # Apply bold formatting (LinkedIn uses **text** for bold) + if bold_text: + for bold in bold_text: + if bold in formatted_text: + formatted_text = formatted_text.replace(bold, f"**{bold}**") + + # Apply italic formatting (LinkedIn uses *text* for italic) + if italic_text: + for italic in italic_text: + if italic in formatted_text: + formatted_text = formatted_text.replace(italic, f"*{italic}*") + + # Add bullet points + if bullet_points: + bullet_section = "\n\n" + "\n".join([f"• {point}" for point in bullet_points]) + formatted_text += bullet_section + + # Add numbered list + if numbered_list: + numbered_section = "\n\n" + "\n".join([f"{i+1}. {item}" for i, item in enumerate(numbered_list)]) + formatted_text += numbered_section + + # Add mentions (LinkedIn uses @mention format) + if mentions: + mention_section = "\n\n" + " ".join([f"@{mention}" for mention in mentions]) + formatted_text += mention_section + + # Add hashtags + if hashtags: + hashtag_section = "\n\n" + _format_hashtags(hashtags) + formatted_text += hashtag_section + + result = { + "original_text": text, + "formatted_text": formatted_text, + "formatting_applied": { + "bold_count": len(bold_text) if bold_text else 0, + "italic_count": len(italic_text) if italic_text else 0, + "bullet_points": len(bullet_points) if bullet_points else 0, + "numbered_items": len(numbered_list) if numbered_list else 0, + "hashtags": len(hashtags) if hashtags else 0, + "mentions": len(mentions) if mentions else 0 + }, + "character_count": len(formatted_text), + "note": "This is formatted text ready for posting. Use linkedin_create_post to publish." + } + + return result + + except Exception as e: + logger.exception(f"Error executing tool format_rich_post: {e}") + return { + "error": "Rich text formatting failed", + "original_text": text, + "exception": str(e) + } + +async def create_post(text: str, title: Optional[str] = None, visibility: str = "PUBLIC", hashtags: Optional[List[str]] = None) -> Dict[str, Any]: + """Create a post on LinkedIn with optional title for article-style posts and hashtags.""" + tool_name = "create_article_post" if title else "create_text_post" + if hashtags: + tool_name += "_with_hashtags" + logger.info(f"Executing tool: {tool_name}") + try: + profile = await make_linkedin_request("GET", "/userinfo") + person_id = profile.get('sub') + + # Format hashtags if provided + hashtag_text = "" + if hashtags: + formatted_hashtags = _format_hashtags(hashtags) + if formatted_hashtags: + hashtag_text = f"\n\n{formatted_hashtags}" + + # Format content with title and hashtags if provided + content = text + if title: + content = f"{title}\n\n{text}" + content += hashtag_text + + endpoint = "/ugcPosts" + payload = { + "author": f"urn:li:person:{person_id}", + "lifecycleState": "PUBLISHED", + "specificContent": { + "com.linkedin.ugc.ShareContent": { + "shareCommentary": { + "text": content + }, + "shareMediaCategory": "NONE" + } + }, + "visibility": { + "com.linkedin.ugc.MemberNetworkVisibility": visibility + } + } + + post_data = await make_linkedin_request("POST", endpoint, json_data=payload) + result = { + "id": post_data.get("id"), + "created": post_data.get("created"), + "lastModified": post_data.get("lastModified"), + "lifecycleState": post_data.get("lifecycleState") + } + + if title: + result["title"] = title + result["note"] = "Created as text post with article format (title + content)" + + if hashtags: + formatted_hashtags = _format_hashtags(hashtags) + result["hashtags_used"] = formatted_hashtags + result["hashtag_count"] = len([h for h in hashtags if h.strip()]) + + return result + except Exception as e: + logger.exception(f"Error executing tool {tool_name}: {e}") + error_result = { + "error": "Post creation failed - likely due to insufficient permissions", + "text": text, + "note": "Requires 'w_member_social' scope in LinkedIn app settings", + "exception": str(e) + } + + if title: + error_result["title"] = title + error_result["error"] = "Article creation failed - trying alternative approach" + error_result["note"] = "Will attempt to create as formatted text post" + + return error_result + +async def create_url_share(url: str, text: str, title: Optional[str] = None, description: Optional[str] = None, visibility: str = "PUBLIC") -> Dict[str, Any]: + """Create a LinkedIn post that shares a URL with metadata preview""" + logger.info(f"Executing tool: create_url_share with URL: {url}") + try: + profile = await make_linkedin_request("GET", "/userinfo") + person_id = profile.get('sub') + + # Format content with title if provided + content = f"{title}\n\n{text}" if title else text + + endpoint = "/ugcPosts" + payload = { + "author": f"urn:li:person:{person_id}", + "lifecycleState": "PUBLISHED", + "specificContent": { + "com.linkedin.ugc.ShareContent": { + "shareCommentary": { + "text": content + }, + "shareMediaCategory": "ARTICLE", + "media": [ + { + "status": "READY", + "description": { + "text": description or "Shared link" + }, + "originalUrl": url, + "title": { + "text": title or "Shared Content" + } + } + ] + } + }, + "visibility": { + "com.linkedin.ugc.MemberNetworkVisibility": visibility + } + } + + post_data = await make_linkedin_request("POST", endpoint, json_data=payload) + result = { + "id": post_data.get("id"), + "created": post_data.get("created"), + "lastModified": post_data.get("lastModified"), + "lifecycleState": post_data.get("lifecycleState"), + "shared_url": url, + "url_title": title or "Shared Content", + "url_description": description or "Shared link" + } + + return result + except Exception as e: + logger.exception(f"Error executing tool create_url_share: {e}") + error_result = { + "error": "URL share creation failed - likely due to insufficient permissions or invalid URL", + "text": text, + "url": url, + "note": "Requires 'w_member_social' scope and valid URL format", + "exception": str(e) + } + + if title: + error_result["title"] = title + if description: + error_result["description"] = description + + return error_result diff --git a/mcp_servers/mailchimp/.env.example b/mcp_servers/mailchimp/.env.example new file mode 100644 index 00000000..6773c84f --- /dev/null +++ b/mcp_servers/mailchimp/.env.example @@ -0,0 +1,9 @@ +MAILCHIMP_API_KEY=your_mailchimp_api_key_here-us6 + +# Server Configuration +# Port for the MCP server to listen on +MAILCHIMP_MCP_SERVER_PORT=5002 + +# Optional: Logging level +# Options: DEBUG, INFO, WARNING, ERROR, CRITICAL +LOG_LEVEL=INFO \ No newline at end of file diff --git a/mcp_servers/mailchimp/Dockerfile b/mcp_servers/mailchimp/Dockerfile new file mode 100644 index 00000000..62c232b6 --- /dev/null +++ b/mcp_servers/mailchimp/Dockerfile @@ -0,0 +1,25 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/mailchimp/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy server files +COPY mcp_servers/mailchimp/server.py . +COPY mcp_servers/mailchimp/tools/ ./tools/ + +# Copy environment file (optional - can be overridden at runtime) +#COPY mcp_servers/mailchimp/.env .env + +# Expose the port the server runs on +EXPOSE 5001 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/mailchimp/README.md b/mcp_servers/mailchimp/README.md new file mode 100644 index 00000000..ca988f92 --- /dev/null +++ b/mcp_servers/mailchimp/README.md @@ -0,0 +1,78 @@ +# Mailchimp MCP Server + +A Model Context Protocol (MCP) server for Mailchimp integration. Manage email campaigns, audiences, and marketing automation using Mailchimp's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Mailchimp with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("MAILCHIMP", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/mailchimp-mcp-server:latest + + +# Run Mailchimp MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/mailchimp-mcp-server:latest + + +# Run Mailchimp MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_mailchimp_api_key_here"}' \ + ghcr.io/klavis-ai/mailchimp-mcp-server:latest +``` + +**OAuth Setup:** Mailchimp requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Campaign Management**: Create, send, and manage email campaigns +- **Audience Operations**: Manage subscriber lists and audience segments +- **Automation**: Set up and manage email automation workflows +- **Analytics**: Access campaign performance and subscriber analytics +- **Template Management**: Create and manage email templates + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/mailchimp/requirements.txt b/mcp_servers/mailchimp/requirements.txt new file mode 100644 index 00000000..70531a2c --- /dev/null +++ b/mcp_servers/mailchimp/requirements.txt @@ -0,0 +1,10 @@ +mcp==1.11.0 +fastapi +uvicorn[standard] +click>=8.0.0 +pydantic>=2.5.0 +aiohttp>=3.8.0 +httpx>=0.27.0 +python-dotenv>=1.0.0 +typing-extensions +starlette>=0.27.0 \ No newline at end of file diff --git a/mcp_servers/mailchimp/server.py b/mcp_servers/mailchimp/server.py new file mode 100644 index 00000000..53d5dcfd --- /dev/null +++ b/mcp_servers/mailchimp/server.py @@ -0,0 +1,658 @@ +import os +import json +import logging +import asyncio +from typing import Any, Dict + +import click +from dotenv import load_dotenv +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.stdio import stdio_server + +from tools import ( + mailchimp_token_context, + ping_mailchimp, + get_account_info, + get_all_audiences, + create_audience, + get_audience_info, + update_audience, + delete_audience, + get_audience_members, + add_member_to_audience, + get_member_info, + update_member, + delete_member, + add_member_tags, + remove_member_tags, + get_member_activity, + get_all_campaigns, + create_campaign, + get_campaign_info, + set_campaign_content, + send_campaign, + schedule_campaign, + delete_campaign, +) + +# Load env early +load_dotenv() + +logger = logging.getLogger("mailchimp-mcp-server") +logging.basicConfig(level=logging.INFO) + +MAILCHIMP_API_KEY = os.getenv("MAILCHIMP_API_KEY") or "" + +async def run_server(log_level: str = "INFO"): + """Run the Mailchimp MCP server with stdio transport for Claude Desktop.""" + logging.getLogger().setLevel(getattr(logging, log_level.upper(), logging.INFO)) + + # Set the API key in context + if MAILCHIMP_API_KEY: + mailchimp_token_context.set(MAILCHIMP_API_KEY) + logger.info("Mailchimp API key configured") + else: + logger.warning("No Mailchimp API key found in environment") + + app = Server("mailchimp-mcp-server") + + # ----------------------------- Tool Registry -----------------------------# + @app.list_tools() + async def list_tools() -> list[types.Tool]: + """List all available Mailchimp tools.""" + tools = [ + # Auth/Account tools + types.Tool( + name="mailchimp_ping", + description="Test Mailchimp Connection - verify API authentication is working correctly.", + inputSchema={"type": "object", "properties": {}}, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_ACCOUNT", "readOnlyHint": True}) + ), + types.Tool( + name="mailchimp_get_account_info", + description="Get Account Information - retrieve comprehensive account details.", + inputSchema={"type": "object", "properties": {}}, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_ACCOUNT", "readOnlyHint": True}) + ), + + # Audience/List management tools + types.Tool( + name="mailchimp_get_all_audiences", + description="Get All Audiences - retrieve all email lists in the account.", + inputSchema={ + "type": "object", + "properties": { + "count": {"type": "integer", "default": 10, "minimum": 1, "maximum": 1000}, + "offset": {"type": "integer", "default": 0, "minimum": 0} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_AUDIENCE", "readOnlyHint": True}) + ), + types.Tool( + name="mailchimp_create_audience", + description="Create New Audience - create a new email list.", + inputSchema={ + "type": "object", + "required": ["name", "contact", "permission_reminder", "from_name", "from_email", "subject"], + "properties": { + "name": {"type": "string"}, + "contact": { + "type": "object", + "properties": { + "company": {"type": "string"}, + "address1": {"type": "string"}, + "city": {"type": "string"}, + "state": {"type": "string"}, + "zip": {"type": "string"}, + "country": {"type": "string"}, + "phone": {"type": "string"} + }, + "required": ["company", "address1", "city", "state", "zip", "country"] + }, + "permission_reminder": {"type": "string"}, + "from_name": {"type": "string"}, + "from_email": {"type": "string"}, + "subject": {"type": "string"}, + "language": {"type": "string", "default": "EN_US"}, + "email_type_option": {"type": "boolean", "default": False}, + "double_optin": {"type": "boolean", "default": False}, + "has_welcome": {"type": "boolean", "default": False} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_AUDIENCE"}) + ), + types.Tool( + name="mailchimp_get_audience_info", + description="Get Audience Details - detailed information about a specific audience.", + inputSchema={ + "type": "object", + "required": ["list_id"], + "properties": {"list_id": {"type": "string"}} + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_AUDIENCE", "readOnlyHint": True}) + ), + types.Tool( + name="mailchimp_update_audience", + description="Update Audience Settings - modify audience configuration.", + inputSchema={ + "type": "object", + "required": ["list_id"], + "properties": { + "list_id": {"type": "string"}, + "name": {"type": "string"}, + "contact": {"type": "object"}, + "permission_reminder": {"type": "string"}, + "from_name": {"type": "string"}, + "from_email": {"type": "string"}, + "subject": {"type": "string"}, + "language": {"type": "string"}, + "email_type_option": {"type": "boolean"}, + "double_optin": {"type": "boolean"}, + "has_welcome": {"type": "boolean"} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_AUDIENCE"}) + ), + types.Tool( + name="mailchimp_delete_audience", + description="Delete Audience - permanently delete an audience and all subscribers.", + inputSchema={ + "type": "object", + "required": ["list_id"], + "properties": {"list_id": {"type": "string"}} + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_AUDIENCE"}) + ), + + # Member/Contact management tools + types.Tool( + name="mailchimp_get_audience_members", + description="Get Audience Members - retrieve contacts from an audience.", + inputSchema={ + "type": "object", + "required": ["list_id"], + "properties": { + "list_id": {"type": "string"}, + "count": {"type": "integer", "default": 10, "minimum": 1, "maximum": 1000}, + "offset": {"type": "integer", "default": 0, "minimum": 0}, + "status": { + "type": "string", + "enum": ["subscribed", "unsubscribed", "cleaned", "pending", "transactional"] + }, + "since_timestamp_opt": {"type": "string"} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_MEMBER", "readOnlyHint": True}) + ), + types.Tool( + name="mailchimp_add_member_to_audience", + description="Add Member to Audience - add a new contact to an audience.", + inputSchema={ + "type": "object", + "required": ["list_id", "email_address"], + "properties": { + "list_id": {"type": "string"}, + "email_address": {"type": "string"}, + "status": { + "type": "string", + "enum": ["subscribed", "unsubscribed", "cleaned", "pending", "transactional"], + "default": "subscribed" + }, + "merge_fields": {"type": "object"}, + "interests": {"type": "object"}, + "language": {"type": "string"}, + "vip": {"type": "boolean"}, + "tags": {"type": "array", "items": {"type": "string"}}, + "ip_signup": {"type": "string"}, + "timestamp_signup": {"type": "string"}, + "ip_opt": {"type": "string"}, + "timestamp_opt": {"type": "string"} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_MEMBER"}) + ), + types.Tool( + name="mailchimp_get_member_info", + description="Get Member Details - detailed information about a specific member.", + inputSchema={ + "type": "object", + "required": ["list_id", "email_address"], + "properties": { + "list_id": {"type": "string"}, + "email_address": {"type": "string"} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_MEMBER", "readOnlyHint": True}) + ), + types.Tool( + name="mailchimp_update_member", + description="Update Member - modify existing member information.", + inputSchema={ + "type": "object", + "required": ["list_id", "email_address"], + "properties": { + "list_id": {"type": "string"}, + "email_address": {"type": "string"}, + "status": { + "type": "string", + "enum": ["subscribed", "unsubscribed", "cleaned", "pending", "transactional"] + }, + "merge_fields": {"type": "object"}, + "interests": {"type": "object"}, + "language": {"type": "string"}, + "vip": {"type": "boolean"}, + "ip_opt": {"type": "string"}, + "timestamp_opt": {"type": "string"} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_MEMBER"}) + ), + types.Tool( + name="mailchimp_delete_member", + description="Delete Member - permanently remove a member from an audience.", + inputSchema={ + "type": "object", + "required": ["list_id", "email_address"], + "properties": { + "list_id": {"type": "string"}, + "email_address": {"type": "string"} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_MEMBER"}) + ), + types.Tool( + name="mailchimp_add_member_tags", + description="Add Member Tags - add organizational tags to a member.", + inputSchema={ + "type": "object", + "required": ["list_id", "email_address", "tags"], + "properties": { + "list_id": {"type": "string"}, + "email_address": {"type": "string"}, + "tags": {"type": "array", "items": {"type": "string"}} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_MEMBER"}) + ), + types.Tool( + name="mailchimp_remove_member_tags", + description="Remove Member Tags - remove tags from a specific member.", + inputSchema={ + "type": "object", + "required": ["list_id", "email_address", "tags"], + "properties": { + "list_id": {"type": "string"}, + "email_address": {"type": "string"}, + "tags": {"type": "array", "items": {"type": "string"}} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_MEMBER"}) + ), + types.Tool( + name="mailchimp_get_member_activity", + description="Get Member Activity - retrieve recent activity history for a member.", + inputSchema={ + "type": "object", + "required": ["list_id", "email_address"], + "properties": { + "list_id": {"type": "string"}, + "email_address": {"type": "string"}, + "count": {"type": "integer", "default": 10, "minimum": 1, "maximum": 50} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_MEMBER", "readOnlyHint": True}) + ), + + # Campaign management tools + types.Tool( + name="mailchimp_get_all_campaigns", + description="Get All Campaigns - retrieve campaigns with optional filtering.", + inputSchema={ + "type": "object", + "properties": { + "count": {"type": "integer", "default": 10, "minimum": 1, "maximum": 1000}, + "offset": {"type": "integer", "default": 0, "minimum": 0}, + "type": {"type": "string", "enum": ["regular", "plaintext", "absplit", "rss", "variate"]}, + "status": {"type": "string", "enum": ["save", "paused", "schedule", "sending", "sent", "canceled", "canceling", "archived"]}, + "before_send_time": {"type": "string"}, + "since_send_time": {"type": "string"}, + "before_create_time": {"type": "string"}, + "since_create_time": {"type": "string"}, + "list_id": {"type": "string"}, + "folder_id": {"type": "string"}, + "sort_field": {"type": "string", "enum": ["create_time", "send_time"]}, + "sort_dir": {"type": "string", "enum": ["ASC", "DESC"]} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_CAMPAIGN", "readOnlyHint": True}) + ), + types.Tool( + name="mailchimp_create_campaign", + description="Create Campaign - create a new email campaign in draft mode.", + inputSchema={ + "type": "object", + "required": ["type", "list_id", "subject_line", "from_name", "reply_to"], + "properties": { + "type": {"type": "string", "enum": ["regular", "plaintext", "absplit", "rss", "variate"]}, + "list_id": {"type": "string"}, + "subject_line": {"type": "string"}, + "from_name": {"type": "string"}, + "reply_to": {"type": "string"}, + "title": {"type": "string"}, + "folder_id": {"type": "string"}, + "authenticate": {"type": "boolean", "default": True}, + "auto_footer": {"type": "boolean", "default": True}, + "inline_css": {"type": "boolean", "default": True}, + "auto_tweet": {"type": "boolean", "default": False}, + "fb_comments": {"type": "boolean", "default": True}, + "timewarp": {"type": "boolean", "default": False}, + "template_id": {"type": "integer"}, + "drag_and_drop": {"type": "boolean", "default": True} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_CAMPAIGN"}) + ), + types.Tool( + name="mailchimp_get_campaign_info", + description="Get Campaign Details - detailed information about a campaign.", + inputSchema={ + "type": "object", + "required": ["campaign_id"], + "properties": {"campaign_id": {"type": "string"}} + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_CAMPAIGN", "readOnlyHint": True}) + ), + types.Tool( + name="mailchimp_set_campaign_content", + description="Set Campaign Content - add HTML content, plain text, or template to a campaign.", + inputSchema={ + "type": "object", + "required": ["campaign_id"], + "properties": { + "campaign_id": {"type": "string"}, + "html": {"type": "string"}, + "plain_text": {"type": "string"}, + "url": {"type": "string"}, + "template": {"type": "object"}, + "archive": {"type": "object"}, + "variate_contents": {"type": "array"} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_CAMPAIGN"}) + ), + types.Tool( + name="mailchimp_send_campaign", + description="Send Campaign - send a campaign immediately to all recipients.", + inputSchema={ + "type": "object", + "required": ["campaign_id"], + "properties": {"campaign_id": {"type": "string"}} + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_CAMPAIGN"}) + ), + types.Tool( + name="mailchimp_schedule_campaign", + description="Schedule Campaign - schedule a campaign for delivery at a specific time.", + inputSchema={ + "type": "object", + "required": ["campaign_id", "schedule_time"], + "properties": { + "campaign_id": {"type": "string"}, + "schedule_time": {"type": "string"}, + "timewarp": {"type": "boolean", "default": False}, + "batch_delay": {"type": "integer"} + } + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_CAMPAIGN"}) + ), + types.Tool( + name="mailchimp_delete_campaign", + description="Delete Campaign - permanently delete a draft campaign.", + inputSchema={ + "type": "object", + "required": ["campaign_id"], + "properties": {"campaign_id": {"type": "string"}} + }, + annotations=types.ToolAnnotations(**{"category": "MAILCHIMP_CAMPAIGN"}) + ), + ] + + logger.info(f"Returning {len(tools)} tools") + return tools + + # ---------------------------- Tool Dispatcher ----------------------------# + @app.call_tool() + async def call_tool(name: str, arguments: Dict[str, Any]) -> list[types.TextContent]: + logger.info(f"Calling tool: {name}") + + try: + # Auth/Account tools + if name == "mailchimp_ping": + result = await ping_mailchimp() + elif name == "mailchimp_get_account_info": + result = await get_account_info() + + # Audience tools + elif name == "mailchimp_get_all_audiences": + result = await get_all_audiences( + count=arguments.get("count", 10), + offset=arguments.get("offset", 0) + ) + elif name == "mailchimp_create_audience": + required_args = ["name", "contact", "permission_reminder", "from_name", "from_email", "subject"] + for arg in required_args: + if arg not in arguments: + raise ValueError(f"Missing required argument: {arg}") + result = await create_audience( + name=arguments["name"], + contact=arguments["contact"], + permission_reminder=arguments["permission_reminder"], + from_name=arguments["from_name"], + from_email=arguments["from_email"], + subject=arguments["subject"], + language=arguments.get("language", "EN_US"), + email_type_option=arguments.get("email_type_option", False), + double_optin=arguments.get("double_optin", False), + has_welcome=arguments.get("has_welcome", False) + ) + elif name == "mailchimp_get_audience_info": + if not arguments.get("list_id"): + raise ValueError("Missing required argument: list_id") + result = await get_audience_info(arguments["list_id"]) + elif name == "mailchimp_update_audience": + if not arguments.get("list_id"): + raise ValueError("Missing required argument: list_id") + result = await update_audience( + list_id=arguments["list_id"], + name=arguments.get("name"), + contact=arguments.get("contact"), + permission_reminder=arguments.get("permission_reminder"), + from_name=arguments.get("from_name"), + from_email=arguments.get("from_email"), + subject=arguments.get("subject"), + language=arguments.get("language"), + email_type_option=arguments.get("email_type_option"), + double_optin=arguments.get("double_optin"), + has_welcome=arguments.get("has_welcome") + ) + elif name == "mailchimp_delete_audience": + if not arguments.get("list_id"): + raise ValueError("Missing required argument: list_id") + result = await delete_audience(arguments["list_id"]) + + # Member tools + elif name == "mailchimp_get_audience_members": + if not arguments.get("list_id"): + raise ValueError("Missing required argument: list_id") + result = await get_audience_members( + list_id=arguments["list_id"], + count=arguments.get("count", 10), + offset=arguments.get("offset", 0), + status=arguments.get("status"), + since_timestamp_opt=arguments.get("since_timestamp_opt") + ) + elif name == "mailchimp_add_member_to_audience": + if not arguments.get("list_id") or not arguments.get("email_address"): + raise ValueError("Missing required arguments: list_id and email_address") + result = await add_member_to_audience( + list_id=arguments["list_id"], + email_address=arguments["email_address"], + status=arguments.get("status", "subscribed"), + merge_fields=arguments.get("merge_fields"), + interests=arguments.get("interests"), + language=arguments.get("language"), + vip=arguments.get("vip"), + tags=arguments.get("tags"), + ip_signup=arguments.get("ip_signup"), + timestamp_signup=arguments.get("timestamp_signup"), + ip_opt=arguments.get("ip_opt"), + timestamp_opt=arguments.get("timestamp_opt") + ) + elif name == "mailchimp_get_member_info": + if not arguments.get("list_id") or not arguments.get("email_address"): + raise ValueError("Missing required arguments: list_id and email_address") + result = await get_member_info(arguments["list_id"], arguments["email_address"]) + elif name == "mailchimp_update_member": + if not arguments.get("list_id") or not arguments.get("email_address"): + raise ValueError("Missing required arguments: list_id and email_address") + result = await update_member( + list_id=arguments["list_id"], + email_address=arguments["email_address"], + status=arguments.get("status"), + merge_fields=arguments.get("merge_fields"), + interests=arguments.get("interests"), + language=arguments.get("language"), + vip=arguments.get("vip"), + ip_opt=arguments.get("ip_opt"), + timestamp_opt=arguments.get("timestamp_opt") + ) + elif name == "mailchimp_delete_member": + if not arguments.get("list_id") or not arguments.get("email_address"): + raise ValueError("Missing required arguments: list_id and email_address") + result = await delete_member(arguments["list_id"], arguments["email_address"]) + elif name == "mailchimp_add_member_tags": + if not all([arguments.get("list_id"), arguments.get("email_address"), arguments.get("tags")]): + raise ValueError("Missing required arguments: list_id, email_address, and tags") + result = await add_member_tags(arguments["list_id"], arguments["email_address"], arguments["tags"]) + elif name == "mailchimp_remove_member_tags": + if not all([arguments.get("list_id"), arguments.get("email_address"), arguments.get("tags")]): + raise ValueError("Missing required arguments: list_id, email_address, and tags") + result = await remove_member_tags(arguments["list_id"], arguments["email_address"], arguments["tags"]) + elif name == "mailchimp_get_member_activity": + if not arguments.get("list_id") or not arguments.get("email_address"): + raise ValueError("Missing required arguments: list_id and email_address") + result = await get_member_activity(arguments["list_id"], arguments["email_address"], arguments.get("count", 10)) + + # Campaign tools + elif name == "mailchimp_get_all_campaigns": + result = await get_all_campaigns( + count=arguments.get("count", 10), + offset=arguments.get("offset", 0), + type=arguments.get("type"), + status=arguments.get("status"), + before_send_time=arguments.get("before_send_time"), + since_send_time=arguments.get("since_send_time"), + before_create_time=arguments.get("before_create_time"), + since_create_time=arguments.get("since_create_time"), + list_id=arguments.get("list_id"), + folder_id=arguments.get("folder_id"), + sort_field=arguments.get("sort_field"), + sort_dir=arguments.get("sort_dir") + ) + elif name == "mailchimp_create_campaign": + required_args = ["type", "list_id", "subject_line", "from_name", "reply_to"] + for arg in required_args: + if arg not in arguments: + raise ValueError(f"Missing required argument: {arg}") + result = await create_campaign( + type=arguments["type"], + list_id=arguments["list_id"], + subject_line=arguments["subject_line"], + from_name=arguments["from_name"], + reply_to=arguments["reply_to"], + title=arguments.get("title"), + folder_id=arguments.get("folder_id"), + authenticate=arguments.get("authenticate", True), + auto_footer=arguments.get("auto_footer", True), + inline_css=arguments.get("inline_css", True), + auto_tweet=arguments.get("auto_tweet", False), + fb_comments=arguments.get("fb_comments", True), + timewarp=arguments.get("timewarp", False), + template_id=arguments.get("template_id"), + drag_and_drop=arguments.get("drag_and_drop", True) + ) + elif name == "mailchimp_get_campaign_info": + if not arguments.get("campaign_id"): + raise ValueError("Missing required argument: campaign_id") + result = await get_campaign_info(arguments["campaign_id"]) + elif name == "mailchimp_set_campaign_content": + if not arguments.get("campaign_id"): + raise ValueError("Missing required argument: campaign_id") + result = await set_campaign_content( + campaign_id=arguments["campaign_id"], + html=arguments.get("html"), + plain_text=arguments.get("plain_text"), + url=arguments.get("url"), + template=arguments.get("template"), + archive=arguments.get("archive"), + variate_contents=arguments.get("variate_contents") + ) + elif name == "mailchimp_send_campaign": + if not arguments.get("campaign_id"): + raise ValueError("Missing required argument: campaign_id") + result = await send_campaign(arguments["campaign_id"]) + elif name == "mailchimp_schedule_campaign": + if not arguments.get("campaign_id") or not arguments.get("schedule_time"): + raise ValueError("Missing required arguments: campaign_id and schedule_time") + result = await schedule_campaign( + campaign_id=arguments["campaign_id"], + schedule_time=arguments["schedule_time"], + timewarp=arguments.get("timewarp", False), + batch_delay=arguments.get("batch_delay") + ) + elif name == "mailchimp_delete_campaign": + if not arguments.get("campaign_id"): + raise ValueError("Missing required argument: campaign_id") + result = await delete_campaign(arguments["campaign_id"]) + + else: + error_msg = f"Unknown tool: {name}" + logger.error(error_msg) + return [types.TextContent(type="text", text=json.dumps({"error": error_msg}))] + + logger.info(f"Tool {name} executed successfully") + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + error_response = { + "error": f"Tool execution failed: {str(e)}", + "tool": name, + "arguments": arguments + } + return [types.TextContent(type="text", text=json.dumps(error_response, indent=2))] + + # Run with stdio transport for Claude Desktop + logger.info("Starting Mailchimp MCP server with stdio transport") + async with stdio_server() as (read_stream, write_stream): + await app.run(read_stream, write_stream, app.create_initialization_options()) + + +@click.command() +@click.option("--log-level", default="INFO", help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)") +def main(log_level: str) -> int: + """Mailchimp MCP server with stdio transport for Claude Desktop.""" + try: + asyncio.run(run_server(log_level)) + return 0 + except KeyboardInterrupt: + logger.info("Server stopped by user") + return 0 + except Exception as e: + logger.error(f"Server error: {e}") + return 1 + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/mailchimp/tools/__init__.py b/mcp_servers/mailchimp/tools/__init__.py new file mode 100644 index 00000000..083385e6 --- /dev/null +++ b/mcp_servers/mailchimp/tools/__init__.py @@ -0,0 +1,63 @@ +from .auth import ping_mailchimp, get_account_info +from .audiences import ( + get_all_audiences, + create_audience, + get_audience_info, + update_audience, + delete_audience +) +from .members import ( + get_audience_members, + add_member_to_audience, + get_member_info, + update_member, + delete_member, + add_member_tags, + remove_member_tags, + get_member_activity +) +from .campaigns import ( + get_all_campaigns, + create_campaign, + get_campaign_info, + set_campaign_content, + send_campaign, + schedule_campaign, + delete_campaign +) +from .base import mailchimp_token_context + +__all__ = [ + # Auth/Account + "ping_mailchimp", + "get_account_info", + + # Audiences/Lists + "get_all_audiences", + "create_audience", + "get_audience_info", + "update_audience", + "delete_audience", + + # Members/Contacts + "get_audience_members", + "add_member_to_audience", + "get_member_info", + "update_member", + "delete_member", + "add_member_tags", + "remove_member_tags", + "get_member_activity", + + # Campaigns + "get_all_campaigns", + "create_campaign", + "get_campaign_info", + "set_campaign_content", + "send_campaign", + "schedule_campaign", + "delete_campaign", + + # Base + "mailchimp_token_context", +] \ No newline at end of file diff --git a/mcp_servers/mailchimp/tools/audiences.py b/mcp_servers/mailchimp/tools/audiences.py new file mode 100644 index 00000000..30a975d1 --- /dev/null +++ b/mcp_servers/mailchimp/tools/audiences.py @@ -0,0 +1,265 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import make_mailchimp_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_all_audiences(count: int = 10, offset: int = 0) -> Dict[str, Any]: + """Get information about all audiences (lists) in the account.""" + logger.info(f"Executing tool: get_all_audiences with count: {count}, offset: {offset}") + try: + endpoint = "/lists" + params = { + "count": count, + "offset": offset + } + + audiences_data = await make_mailchimp_request("GET", endpoint, params=params) + + result = { + "total_items": audiences_data.get("total_items"), + "audiences": [] + } + + for audience in audiences_data.get("lists", []): + audience_info = { + "id": audience.get("id"), + "name": audience.get("name"), + "date_created": audience.get("date_created"), + "list_rating": audience.get("list_rating"), + "stats": { + "member_count": audience.get("stats", {}).get("member_count"), + "unsubscribe_count": audience.get("stats", {}).get("unsubscribe_count"), + "cleaned_count": audience.get("stats", {}).get("cleaned_count"), + "member_count_since_send": audience.get("stats", {}).get("member_count_since_send"), + "unsubscribe_count_since_send": audience.get("stats", {}).get("unsubscribe_count_since_send"), + "cleaned_count_since_send": audience.get("stats", {}).get("cleaned_count_since_send"), + "campaign_count": audience.get("stats", {}).get("campaign_count"), + "campaign_last_sent": audience.get("stats", {}).get("campaign_last_sent"), + "merge_field_count": audience.get("stats", {}).get("merge_field_count"), + "avg_sub_rate": audience.get("stats", {}).get("avg_sub_rate"), + "avg_unsub_rate": audience.get("stats", {}).get("avg_unsub_rate"), + "target_sub_rate": audience.get("stats", {}).get("target_sub_rate"), + "open_rate": audience.get("stats", {}).get("open_rate"), + "click_rate": audience.get("stats", {}).get("click_rate"), + "last_sub_date": audience.get("stats", {}).get("last_sub_date"), + "last_unsub_date": audience.get("stats", {}).get("last_unsub_date") + }, + "subscribe_url_short": audience.get("subscribe_url_short"), + "subscribe_url_long": audience.get("subscribe_url_long"), + "beamer_address": audience.get("beamer_address"), + "visibility": audience.get("visibility"), + "double_optin": audience.get("double_optin"), + "has_welcome": audience.get("has_welcome"), + "marketing_permissions": audience.get("marketing_permissions") + } + result["audiences"].append(audience_info) + + return result + except Exception as e: + logger.exception(f"Error executing tool get_all_audiences: {e}") + return { + "error": "Failed to retrieve audiences", + "exception": str(e) + } + +async def create_audience( + name: str, + contact: Dict[str, str], + permission_reminder: str, + from_name: str, + from_email: str, + subject: str, + language: str = "EN_US", + email_type_option: bool = False, + double_optin: bool = False, + has_welcome: bool = False +) -> Dict[str, Any]: + """Create a new audience (list) in Mailchimp.""" + logger.info(f"Executing tool: create_audience with name: {name}") + try: + endpoint = "/lists" + + payload = { + "name": name, + "contact": contact, + "permission_reminder": permission_reminder, + "campaign_defaults": { + "from_name": from_name, + "from_email": from_email, + "subject": subject, + "language": language + }, + "email_type_option": email_type_option, + "double_optin": double_optin, + "has_welcome": has_welcome + } + + audience_data = await make_mailchimp_request("POST", endpoint, json_data=payload) + + result = { + "id": audience_data.get("id"), + "name": audience_data.get("name"), + "date_created": audience_data.get("date_created"), + "list_rating": audience_data.get("list_rating"), + "subscribe_url_short": audience_data.get("subscribe_url_short"), + "subscribe_url_long": audience_data.get("subscribe_url_long"), + "beamer_address": audience_data.get("beamer_address"), + "visibility": audience_data.get("visibility"), + "double_optin": audience_data.get("double_optin"), + "has_welcome": audience_data.get("has_welcome"), + "permission_reminder": audience_data.get("permission_reminder"), + "from_name": from_name, + "from_email": from_email, + "contact": contact + } + + return result + except Exception as e: + logger.exception(f"Error executing tool create_audience: {e}") + return { + "error": "Failed to create audience", + "name": name, + "exception": str(e) + } + +async def get_audience_info(list_id: str) -> Dict[str, Any]: + """Get information about a specific audience (list).""" + logger.info(f"Executing tool: get_audience_info with list_id: {list_id}") + try: + endpoint = f"/lists/{list_id}" + + audience_data = await make_mailchimp_request("GET", endpoint) + + result = { + "id": audience_data.get("id"), + "name": audience_data.get("name"), + "date_created": audience_data.get("date_created"), + "list_rating": audience_data.get("list_rating"), + "stats": audience_data.get("stats", {}), + "subscribe_url_short": audience_data.get("subscribe_url_short"), + "subscribe_url_long": audience_data.get("subscribe_url_long"), + "beamer_address": audience_data.get("beamer_address"), + "visibility": audience_data.get("visibility"), + "double_optin": audience_data.get("double_optin"), + "has_welcome": audience_data.get("has_welcome"), + "permission_reminder": audience_data.get("permission_reminder"), + "use_archive_bar": audience_data.get("use_archive_bar"), + "notify_on_subscribe": audience_data.get("notify_on_subscribe"), + "notify_on_unsubscribe": audience_data.get("notify_on_unsubscribe"), + "marketing_permissions": audience_data.get("marketing_permissions"), + "contact": audience_data.get("contact", {}), + "campaign_defaults": audience_data.get("campaign_defaults", {}) + } + + return result + except Exception as e: + logger.exception(f"Error executing tool get_audience_info: {e}") + return { + "error": "Failed to retrieve audience information", + "list_id": list_id, + "exception": str(e) + } + +async def update_audience( + list_id: str, + name: Optional[str] = None, + contact: Optional[Dict[str, str]] = None, + permission_reminder: Optional[str] = None, + from_name: Optional[str] = None, + from_email: Optional[str] = None, + subject: Optional[str] = None, + language: Optional[str] = None, + email_type_option: Optional[bool] = None, + double_optin: Optional[bool] = None, + has_welcome: Optional[bool] = None +) -> Dict[str, Any]: + """Update settings for a specific audience (list).""" + logger.info(f"Executing tool: update_audience with list_id: {list_id}") + try: + endpoint = f"/lists/{list_id}" + + # Build payload with only provided fields + payload = {} + + if name is not None: + payload["name"] = name + if contact is not None: + payload["contact"] = contact + if permission_reminder is not None: + payload["permission_reminder"] = permission_reminder + if email_type_option is not None: + payload["email_type_option"] = email_type_option + if double_optin is not None: + payload["double_optin"] = double_optin + if has_welcome is not None: + payload["has_welcome"] = has_welcome + + # Handle campaign_defaults separately + campaign_defaults = {} + if from_name is not None: + campaign_defaults["from_name"] = from_name + if from_email is not None: + campaign_defaults["from_email"] = from_email + if subject is not None: + campaign_defaults["subject"] = subject + if language is not None: + campaign_defaults["language"] = language + + if campaign_defaults: + payload["campaign_defaults"] = campaign_defaults + + if not payload: + return { + "error": "No update parameters provided", + "list_id": list_id + } + + audience_data = await make_mailchimp_request("PATCH", endpoint, json_data=payload) + + result = { + "id": audience_data.get("id"), + "name": audience_data.get("name"), + "date_created": audience_data.get("date_created"), + "updated_fields": list(payload.keys()), + "stats": audience_data.get("stats", {}), + "contact": audience_data.get("contact", {}), + "campaign_defaults": audience_data.get("campaign_defaults", {}), + "permission_reminder": audience_data.get("permission_reminder"), + "double_optin": audience_data.get("double_optin"), + "has_welcome": audience_data.get("has_welcome") + } + + return result + except Exception as e: + logger.exception(f"Error executing tool update_audience: {e}") + return { + "error": "Failed to update audience", + "list_id": list_id, + "exception": str(e) + } + +async def delete_audience(list_id: str) -> Dict[str, Any]: + """Delete an audience (list) from Mailchimp account.""" + logger.info(f"Executing tool: delete_audience with list_id: {list_id}") + try: + endpoint = f"/lists/{list_id}" + + await make_mailchimp_request("DELETE", endpoint, expect_empty_response=True) + + result = { + "status": "success", + "message": f"Audience {list_id} has been deleted", + "list_id": list_id, + "warning": "This action is permanent. List history including subscriber activity, unsubscribes, complaints, and bounces have been lost." + } + + return result + except Exception as e: + logger.exception(f"Error executing tool delete_audience: {e}") + return { + "error": "Failed to delete audience", + "list_id": list_id, + "exception": str(e) + } \ No newline at end of file diff --git a/mcp_servers/mailchimp/tools/auth.py b/mcp_servers/mailchimp/tools/auth.py new file mode 100644 index 00000000..05119f7f --- /dev/null +++ b/mcp_servers/mailchimp/tools/auth.py @@ -0,0 +1,64 @@ +import logging +from typing import Any, Dict +from .base import make_mailchimp_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def ping_mailchimp() -> Dict[str, Any]: + """Test the connection to Mailchimp API and verify authentication.""" + logger.info("Executing tool: ping_mailchimp") + try: + endpoint = "/ping" + + ping_data = await make_mailchimp_request("GET", endpoint) + + result = { + "status": "success", + "health_status": ping_data.get("health_status"), + "message": "Mailchimp API connection successful" + } + + return result + except Exception as e: + logger.exception(f"Error executing tool ping_mailchimp: {e}") + return { + "status": "error", + "message": "Failed to connect to Mailchimp API", + "error": str(e) + } + +async def get_account_info() -> Dict[str, Any]: + """Get information about the Mailchimp account.""" + logger.info("Executing tool: get_account_info") + try: + endpoint = "/" + + account_data = await make_mailchimp_request("GET", endpoint) + + account_info = { + "account_id": account_data.get("account_id"), + "account_name": account_data.get("account_name"), + "username": account_data.get("username"), + "email": account_data.get("email"), + "first_name": account_data.get("first_name"), + "last_name": account_data.get("last_name"), + "avatar_url": account_data.get("avatar_url"), + "role": account_data.get("role"), + "member_since": account_data.get("member_since"), + "pricing_plan_type": account_data.get("pricing_plan_type"), + "industry": account_data.get("industry"), + "timezone": account_data.get("timezone"), + "total_subscribers": account_data.get("total_subscribers"), + "contact": account_data.get("contact", {}), + "pro_enabled": account_data.get("pro_enabled"), + "last_login": account_data.get("last_login") + } + + return account_info + except Exception as e: + logger.exception(f"Error executing tool get_account_info: {e}") + return { + "error": "Failed to retrieve account information", + "exception": str(e) + } \ No newline at end of file diff --git a/mcp_servers/mailchimp/tools/base.py b/mcp_servers/mailchimp/tools/base.py new file mode 100644 index 00000000..83b6cf5d --- /dev/null +++ b/mcp_servers/mailchimp/tools/base.py @@ -0,0 +1,117 @@ +import os +import logging +import ssl +from typing import Any, Dict, Optional +from contextvars import ContextVar +import aiohttp + +# Configure logging +logger = logging.getLogger(__name__) + +# Context variable to store the Mailchimp API key for each request +mailchimp_token_context: ContextVar[str] = ContextVar('mailchimp_token') + +def get_mailchimp_api_key() -> str: + """Get the Mailchimp API key from context or environment.""" + try: + # Try to get from context first (for MCP server usage) + return mailchimp_token_context.get() + except LookupError: + # Fall back to environment variable (for standalone usage) + api_key = os.getenv("MAILCHIMP_API_KEY") + if not api_key: + raise RuntimeError("Mailchimp API key not found in request context or environment") + return api_key + +def _extract_datacenter_from_api_key(api_key: str) -> str: + """Extract the datacenter from the Mailchimp API key.""" + # Mailchimp API keys are formatted as: key-datacenter (e.g., abc123-us6) + if '-' in api_key: + return api_key.split('-')[-1] + else: + raise ValueError("Invalid Mailchimp API key format. Expected format: key-datacenter") + +def _get_mailchimp_base_url() -> str: + """Get the base URL for Mailchimp API based on the datacenter.""" + api_key = get_mailchimp_api_key() + datacenter = _extract_datacenter_from_api_key(api_key) + return f"https://{datacenter}.api.mailchimp.com/3.0" + +def _get_mailchimp_headers() -> Dict[str, str]: + """Create standard headers for Mailchimp API calls.""" + return { + "Content-Type": "application/json" + } + +def _get_ssl_context(): + """Create secure SSL context.""" + return ssl.create_default_context() + +async def make_mailchimp_request( + method: str, + endpoint: str, + json_data: Optional[Dict] = None, + params: Optional[Dict] = None, + expect_empty_response: bool = False +) -> Any: + """ + Makes an HTTP request to the Mailchimp API. + + Args: + method: HTTP method (GET, POST, PATCH, DELETE) + endpoint: API endpoint (should start with /) + json_data: JSON payload for POST/PATCH requests + params: Query parameters for GET requests + expect_empty_response: Whether to expect an empty response (for some operations) + + Returns: + Response data as dict, or None for empty responses + """ + base_url = _get_mailchimp_base_url() + url = f"{base_url}{endpoint}" + headers = _get_mailchimp_headers() + api_key = get_mailchimp_api_key() + + # Mailchimp uses HTTP Basic Auth with 'anystring' as username and API key as password + auth = aiohttp.BasicAuth('anystring', api_key) + + connector = aiohttp.TCPConnector(ssl=_get_ssl_context()) + async with aiohttp.ClientSession(headers=headers, connector=connector, auth=auth) as session: + try: + async with session.request(method, url, json=json_data, params=params) as response: + response.raise_for_status() + + if expect_empty_response: + if response.status in [200, 201, 204]: + return None + else: + logger.warning(f"Expected empty response for {method} {endpoint}, but got status {response.status}") + try: + return await response.json() + except aiohttp.ContentTypeError: + return await response.text() + else: + if 'application/json' in response.headers.get('Content-Type', ''): + return await response.json() + else: + text_content = await response.text() + logger.warning(f"Received non-JSON response for {method} {endpoint}: {text_content[:100]}...") + return {"raw_content": text_content} + + except aiohttp.ClientResponseError as e: + logger.error(f"Mailchimp API request failed: {e.status} {e.message} for {method} {url}") + error_details = e.message + try: + error_body = await e.response.json() + if 'detail' in error_body: + error_details = error_body['detail'] + elif 'title' in error_body: + error_details = error_body['title'] + else: + error_details = f"{e.message} - {error_body}" + except Exception: + pass + raise RuntimeError(f"Mailchimp API Error ({e.status}): {error_details}") from e + except Exception as e: + logger.error(f"An unexpected error occurred during Mailchimp API request: {e}") + raise RuntimeError(f"Unexpected error during API call to {method} {url}") from e \ No newline at end of file diff --git a/mcp_servers/mailchimp/tools/campaigns.py b/mcp_servers/mailchimp/tools/campaigns.py new file mode 100644 index 00000000..07284a9f --- /dev/null +++ b/mcp_servers/mailchimp/tools/campaigns.py @@ -0,0 +1,359 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import make_mailchimp_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_all_campaigns( + count: int = 10, + offset: int = 0, + type: Optional[str] = None, + status: Optional[str] = None, + before_send_time: Optional[str] = None, + since_send_time: Optional[str] = None, + before_create_time: Optional[str] = None, + since_create_time: Optional[str] = None, + list_id: Optional[str] = None, + folder_id: Optional[str] = None, + sort_field: Optional[str] = None, + sort_dir: Optional[str] = None +) -> Dict[str, Any]: + """Get all campaigns in the account with optional filtering.""" + logger.info(f"Executing tool: get_all_campaigns with count: {count}, offset: {offset}") + try: + endpoint = "/campaigns" + params = { + "count": count, + "offset": offset + } + + # Add optional filters + if type: + params["type"] = type + if status: + params["status"] = status + if before_send_time: + params["before_send_time"] = before_send_time + if since_send_time: + params["since_send_time"] = since_send_time + if before_create_time: + params["before_create_time"] = before_create_time + if since_create_time: + params["since_create_time"] = since_create_time + if list_id: + params["list_id"] = list_id + if folder_id: + params["folder_id"] = folder_id + if sort_field: + params["sort_field"] = sort_field + if sort_dir: + params["sort_dir"] = sort_dir + + campaigns_data = await make_mailchimp_request("GET", endpoint, params=params) + + result = { + "total_items": campaigns_data.get("total_items"), + "campaigns": [] + } + + for campaign in campaigns_data.get("campaigns", []): + campaign_info = { + "id": campaign.get("id"), + "web_id": campaign.get("web_id"), + "parent_campaign_id": campaign.get("parent_campaign_id"), + "type": campaign.get("type"), + "create_time": campaign.get("create_time"), + "archive_url": campaign.get("archive_url"), + "long_archive_url": campaign.get("long_archive_url"), + "status": campaign.get("status"), + "emails_sent": campaign.get("emails_sent"), + "send_time": campaign.get("send_time"), + "content_type": campaign.get("content_type"), + "needs_block_refresh": campaign.get("needs_block_refresh"), + "resendable": campaign.get("resendable"), + "recipients": campaign.get("recipients", {}), + "settings": campaign.get("settings", {}), + "tracking": campaign.get("tracking", {}), + "report_summary": campaign.get("report_summary", {}), + "delivery_status": campaign.get("delivery_status", {}) + } + result["campaigns"].append(campaign_info) + + return result + except Exception as e: + logger.exception(f"Error executing tool get_all_campaigns: {e}") + return { + "error": "Failed to retrieve campaigns", + "exception": str(e) + } + +async def create_campaign( + type: str, + list_id: str, + subject_line: str, + from_name: str, + reply_to: str, + title: Optional[str] = None, + folder_id: Optional[str] = None, + authenticate: bool = True, + auto_footer: bool = True, + inline_css: bool = True, + auto_tweet: bool = False, + fb_comments: bool = True, + timewarp: bool = False, + template_id: Optional[int] = None, + drag_and_drop: bool = True +) -> Dict[str, Any]: + """Create a new Mailchimp campaign.""" + logger.info(f"Executing tool: create_campaign with type: {type}, list_id: {list_id}, subject: {subject_line}") + try: + endpoint = "/campaigns" + + payload = { + "type": type, + "recipients": { + "list_id": list_id + }, + "settings": { + "subject_line": subject_line, + "from_name": from_name, + "reply_to": reply_to, + "authenticate": authenticate, + "auto_footer": auto_footer, + "inline_css": inline_css, + "auto_tweet": auto_tweet, + "fb_comments": fb_comments, + "timewarp": timewarp, + "drag_and_drop": drag_and_drop + } + } + + if title: + payload["settings"]["title"] = title + if folder_id: + payload["settings"]["folder_id"] = folder_id + if template_id: + payload["settings"]["template_id"] = template_id + + campaign_data = await make_mailchimp_request("POST", endpoint, json_data=payload) + + result = { + "id": campaign_data.get("id"), + "web_id": campaign_data.get("web_id"), + "type": campaign_data.get("type"), + "create_time": campaign_data.get("create_time"), + "archive_url": campaign_data.get("archive_url"), + "long_archive_url": campaign_data.get("long_archive_url"), + "status": campaign_data.get("status"), + "emails_sent": campaign_data.get("emails_sent"), + "content_type": campaign_data.get("content_type"), + "recipients": campaign_data.get("recipients", {}), + "settings": campaign_data.get("settings", {}), + "tracking": campaign_data.get("tracking", {}), + "delivery_status": campaign_data.get("delivery_status", {}), + "list_id": list_id, + "subject_line": subject_line, + "from_name": from_name, + "note": "Campaign created successfully. Use set_campaign_content to add content, then send_campaign to send it." + } + + return result + except Exception as e: + logger.exception(f"Error executing tool create_campaign: {e}") + return { + "error": "Failed to create campaign", + "type": type, + "list_id": list_id, + "subject_line": subject_line, + "exception": str(e) + } + +async def get_campaign_info(campaign_id: str) -> Dict[str, Any]: + """Get information about a specific campaign.""" + logger.info(f"Executing tool: get_campaign_info with campaign_id: {campaign_id}") + try: + endpoint = f"/campaigns/{campaign_id}" + + campaign_data = await make_mailchimp_request("GET", endpoint) + + result = { + "id": campaign_data.get("id"), + "web_id": campaign_data.get("web_id"), + "parent_campaign_id": campaign_data.get("parent_campaign_id"), + "type": campaign_data.get("type"), + "create_time": campaign_data.get("create_time"), + "archive_url": campaign_data.get("archive_url"), + "long_archive_url": campaign_data.get("long_archive_url"), + "status": campaign_data.get("status"), + "emails_sent": campaign_data.get("emails_sent"), + "send_time": campaign_data.get("send_time"), + "content_type": campaign_data.get("content_type"), + "needs_block_refresh": campaign_data.get("needs_block_refresh"), + "resendable": campaign_data.get("resendable"), + "recipients": campaign_data.get("recipients", {}), + "settings": campaign_data.get("settings", {}), + "tracking": campaign_data.get("tracking", {}), + "report_summary": campaign_data.get("report_summary", {}), + "delivery_status": campaign_data.get("delivery_status", {}), + "ab_split_opts": campaign_data.get("ab_split_opts", {}), + "social_card": campaign_data.get("social_card", {}), + "rss_opts": campaign_data.get("rss_opts", {}), + "variate_settings": campaign_data.get("variate_settings", {}) + } + + return result + except Exception as e: + logger.exception(f"Error executing tool get_campaign_info: {e}") + return { + "error": "Failed to retrieve campaign information", + "campaign_id": campaign_id, + "exception": str(e) + } + +async def set_campaign_content( + campaign_id: str, + html: Optional[str] = None, + plain_text: Optional[str] = None, + url: Optional[str] = None, + template: Optional[Dict[str, Any]] = None, + archive: Optional[Dict[str, Any]] = None, + variate_contents: Optional[List[Dict[str, Any]]] = None +) -> Dict[str, Any]: + """Set the content for a campaign.""" + logger.info(f"Executing tool: set_campaign_content with campaign_id: {campaign_id}") + try: + endpoint = f"/campaigns/{campaign_id}/content" + + payload = {} + + if html: + payload["html"] = html + if plain_text: + payload["plain_text"] = plain_text + if url: + payload["url"] = url + if template: + payload["template"] = template + if archive: + payload["archive"] = archive + if variate_contents: + payload["variate_contents"] = variate_contents + + if not payload: + return { + "error": "No content provided", + "campaign_id": campaign_id, + "note": "At least one content type (html, plain_text, url, template, archive, or variate_contents) must be provided" + } + + content_data = await make_mailchimp_request("PUT", endpoint, json_data=payload) + + result = { + "variate_contents": content_data.get("variate_contents", []), + "html": content_data.get("html"), + "plain_text": content_data.get("plain_text"), + "archive_html": content_data.get("archive_html"), + "campaign_id": campaign_id, + "content_set": list(payload.keys()), + "note": "Campaign content has been set successfully. Campaign is now ready to send." + } + + return result + except Exception as e: + logger.exception(f"Error executing tool set_campaign_content: {e}") + return { + "error": "Failed to set campaign content", + "campaign_id": campaign_id, + "exception": str(e) + } + +async def send_campaign(campaign_id: str) -> Dict[str, Any]: + """Send a Mailchimp campaign immediately.""" + logger.info(f"Executing tool: send_campaign with campaign_id: {campaign_id}") + try: + endpoint = f"/campaigns/{campaign_id}/actions/send" + + await make_mailchimp_request("POST", endpoint, expect_empty_response=True) + + result = { + "status": "success", + "message": f"Campaign {campaign_id} has been sent successfully", + "campaign_id": campaign_id, + "action": "sent", + "note": "Campaign is now being delivered to recipients. Check campaign reports for delivery status." + } + + return result + except Exception as e: + logger.exception(f"Error executing tool send_campaign: {e}") + return { + "error": "Failed to send campaign", + "campaign_id": campaign_id, + "note": "Ensure campaign has content and passes send checklist requirements", + "exception": str(e) + } + +async def schedule_campaign(campaign_id: str, schedule_time: str, timewarp: bool = False, batch_delay: Optional[int] = None) -> Dict[str, Any]: + """Schedule a campaign for delivery at a specific time.""" + logger.info(f"Executing tool: schedule_campaign with campaign_id: {campaign_id}, schedule_time: {schedule_time}") + try: + endpoint = f"/campaigns/{campaign_id}/actions/schedule" + + payload = { + "schedule_time": schedule_time, + "timewarp": timewarp + } + + if batch_delay: + payload["batch_delay"] = batch_delay + + await make_mailchimp_request("POST", endpoint, json_data=payload, expect_empty_response=True) + + result = { + "status": "success", + "message": f"Campaign {campaign_id} has been scheduled successfully", + "campaign_id": campaign_id, + "schedule_time": schedule_time, + "timewarp": timewarp, + "batch_delay": batch_delay, + "action": "scheduled", + "note": "Campaign is scheduled for delivery. Use unschedule if you need to make changes." + } + + return result + except Exception as e: + logger.exception(f"Error executing tool schedule_campaign: {e}") + return { + "error": "Failed to schedule campaign", + "campaign_id": campaign_id, + "schedule_time": schedule_time, + "note": "Ensure campaign has content and passes send checklist requirements", + "exception": str(e) + } + +async def delete_campaign(campaign_id: str) -> Dict[str, Any]: + """Delete a campaign from Mailchimp account.""" + logger.info(f"Executing tool: delete_campaign with campaign_id: {campaign_id}") + try: + endpoint = f"/campaigns/{campaign_id}" + + await make_mailchimp_request("DELETE", endpoint, expect_empty_response=True) + + result = { + "status": "success", + "message": f"Campaign {campaign_id} has been deleted successfully", + "campaign_id": campaign_id, + "action": "deleted", + "warning": "This action is permanent. Campaign content and statistics have been removed." + } + + return result + except Exception as e: + logger.exception(f"Error executing tool delete_campaign: {e}") + return { + "error": "Failed to delete campaign", + "campaign_id": campaign_id, + "note": "Only draft campaigns can be deleted. Sent campaigns cannot be deleted.", + "exception": str(e) + } \ No newline at end of file diff --git a/mcp_servers/mailchimp/tools/members.py b/mcp_servers/mailchimp/tools/members.py new file mode 100644 index 00000000..1e955091 --- /dev/null +++ b/mcp_servers/mailchimp/tools/members.py @@ -0,0 +1,421 @@ +import logging +import hashlib +from typing import Any, Dict, List, Optional +from .base import make_mailchimp_request + +# Configure logging +logger = logging.getLogger(__name__) + +def _get_subscriber_hash(email: str) -> str: + """Generate MD5 hash of lowercase email for Mailchimp subscriber_hash.""" + return hashlib.md5(email.lower().encode()).hexdigest() + +async def get_audience_members( + list_id: str, + count: int = 10, + offset: int = 0, + status: Optional[str] = None, + since_timestamp_opt: Optional[str] = None +) -> Dict[str, Any]: + """Get members from a specific audience (list).""" + logger.info(f"Executing tool: get_audience_members with list_id: {list_id}, count: {count}, offset: {offset}") + try: + endpoint = f"/lists/{list_id}/members" + params = { + "count": count, + "offset": offset + } + + if status: + params["status"] = status + if since_timestamp_opt: + params["since_timestamp_opt"] = since_timestamp_opt + + members_data = await make_mailchimp_request("GET", endpoint, params=params) + + result = { + "list_id": list_id, + "total_items": members_data.get("total_items"), + "members": [] + } + + for member in members_data.get("members", []): + member_info = { + "id": member.get("id"), + "email_address": member.get("email_address"), + "unique_email_id": member.get("unique_email_id"), + "contact_id": member.get("contact_id"), + "full_name": member.get("full_name"), + "web_id": member.get("web_id"), + "email_type": member.get("email_type"), + "status": member.get("status"), + "unsubscribe_reason": member.get("unsubscribe_reason"), + "consents_to_one_to_one_messaging": member.get("consents_to_one_to_one_messaging"), + "merge_fields": member.get("merge_fields", {}), + "interests": member.get("interests", {}), + "stats": member.get("stats", {}), + "ip_signup": member.get("ip_signup"), + "timestamp_signup": member.get("timestamp_signup"), + "ip_opt": member.get("ip_opt"), + "timestamp_opt": member.get("timestamp_opt"), + "member_rating": member.get("member_rating"), + "last_changed": member.get("last_changed"), + "language": member.get("language"), + "vip": member.get("vip"), + "email_client": member.get("email_client"), + "location": member.get("location", {}), + "marketing_permissions": member.get("marketing_permissions", []), + "last_note": member.get("last_note", {}), + "source": member.get("source"), + "tags_count": member.get("tags_count"), + "tags": member.get("tags", []) + } + result["members"].append(member_info) + + return result + except Exception as e: + logger.exception(f"Error executing tool get_audience_members: {e}") + return { + "error": "Failed to retrieve audience members", + "list_id": list_id, + "exception": str(e) + } + +async def add_member_to_audience( + list_id: str, + email_address: str, + status: str = "subscribed", + merge_fields: Optional[Dict[str, str]] = None, + interests: Optional[Dict[str, bool]] = None, + language: Optional[str] = None, + vip: Optional[bool] = None, + tags: Optional[List[str]] = None, + ip_signup: Optional[str] = None, + timestamp_signup: Optional[str] = None, + ip_opt: Optional[str] = None, + timestamp_opt: Optional[str] = None +) -> Dict[str, Any]: + """Add a new member to an audience or update existing member.""" + logger.info(f"Executing tool: add_member_to_audience with list_id: {list_id}, email: {email_address}") + try: + endpoint = f"/lists/{list_id}/members" + + payload = { + "email_address": email_address, + "status": status + } + + if merge_fields: + payload["merge_fields"] = merge_fields + if interests: + payload["interests"] = interests + if language: + payload["language"] = language + if vip is not None: + payload["vip"] = vip + if tags: + payload["tags"] = tags + if ip_signup: + payload["ip_signup"] = ip_signup + if timestamp_signup: + payload["timestamp_signup"] = timestamp_signup + if ip_opt: + payload["ip_opt"] = ip_opt + if timestamp_opt: + payload["timestamp_opt"] = timestamp_opt + + member_data = await make_mailchimp_request("POST", endpoint, json_data=payload) + + result = { + "id": member_data.get("id"), + "email_address": member_data.get("email_address"), + "unique_email_id": member_data.get("unique_email_id"), + "contact_id": member_data.get("contact_id"), + "full_name": member_data.get("full_name"), + "web_id": member_data.get("web_id"), + "email_type": member_data.get("email_type"), + "status": member_data.get("status"), + "merge_fields": member_data.get("merge_fields", {}), + "interests": member_data.get("interests", {}), + "stats": member_data.get("stats", {}), + "ip_signup": member_data.get("ip_signup"), + "timestamp_signup": member_data.get("timestamp_signup"), + "ip_opt": member_data.get("ip_opt"), + "timestamp_opt": member_data.get("timestamp_opt"), + "member_rating": member_data.get("member_rating"), + "last_changed": member_data.get("last_changed"), + "language": member_data.get("language"), + "vip": member_data.get("vip"), + "email_client": member_data.get("email_client"), + "location": member_data.get("location", {}), + "source": member_data.get("source"), + "tags_count": member_data.get("tags_count"), + "tags": member_data.get("tags", []), + "list_id": list_id + } + + return result + except Exception as e: + logger.exception(f"Error executing tool add_member_to_audience: {e}") + return { + "error": "Failed to add member to audience", + "list_id": list_id, + "email_address": email_address, + "note": "Try using update_member if member already exists", + "exception": str(e) + } + +async def get_member_info(list_id: str, email_address: str) -> Dict[str, Any]: + """Get information about a specific audience member.""" + logger.info(f"Executing tool: get_member_info with list_id: {list_id}, email: {email_address}") + try: + subscriber_hash = _get_subscriber_hash(email_address) + endpoint = f"/lists/{list_id}/members/{subscriber_hash}" + + member_data = await make_mailchimp_request("GET", endpoint) + + result = { + "id": member_data.get("id"), + "email_address": member_data.get("email_address"), + "unique_email_id": member_data.get("unique_email_id"), + "contact_id": member_data.get("contact_id"), + "full_name": member_data.get("full_name"), + "web_id": member_data.get("web_id"), + "email_type": member_data.get("email_type"), + "status": member_data.get("status"), + "unsubscribe_reason": member_data.get("unsubscribe_reason"), + "consents_to_one_to_one_messaging": member_data.get("consents_to_one_to_one_messaging"), + "merge_fields": member_data.get("merge_fields", {}), + "interests": member_data.get("interests", {}), + "stats": member_data.get("stats", {}), + "ip_signup": member_data.get("ip_signup"), + "timestamp_signup": member_data.get("timestamp_signup"), + "ip_opt": member_data.get("ip_opt"), + "timestamp_opt": member_data.get("timestamp_opt"), + "member_rating": member_data.get("member_rating"), + "last_changed": member_data.get("last_changed"), + "language": member_data.get("language"), + "vip": member_data.get("vip"), + "email_client": member_data.get("email_client"), + "location": member_data.get("location", {}), + "marketing_permissions": member_data.get("marketing_permissions", []), + "last_note": member_data.get("last_note", {}), + "source": member_data.get("source"), + "tags_count": member_data.get("tags_count"), + "tags": member_data.get("tags", []), + "list_id": list_id, + "subscriber_hash": subscriber_hash + } + + return result + except Exception as e: + logger.exception(f"Error executing tool get_member_info: {e}") + return { + "error": "Failed to retrieve member information", + "list_id": list_id, + "email_address": email_address, + "exception": str(e) + } + +async def update_member( + list_id: str, + email_address: str, + status: Optional[str] = None, + merge_fields: Optional[Dict[str, str]] = None, + interests: Optional[Dict[str, bool]] = None, + language: Optional[str] = None, + vip: Optional[bool] = None, + ip_opt: Optional[str] = None, + timestamp_opt: Optional[str] = None +) -> Dict[str, Any]: + """Update an existing audience member or add if not exists.""" + logger.info(f"Executing tool: update_member with list_id: {list_id}, email: {email_address}") + try: + subscriber_hash = _get_subscriber_hash(email_address) + endpoint = f"/lists/{list_id}/members/{subscriber_hash}" + + payload = { + "email_address": email_address + } + + if status: + payload["status"] = status + if merge_fields: + payload["merge_fields"] = merge_fields + if interests: + payload["interests"] = interests + if language: + payload["language"] = language + if vip is not None: + payload["vip"] = vip + if ip_opt: + payload["ip_opt"] = ip_opt + if timestamp_opt: + payload["timestamp_opt"] = timestamp_opt + + member_data = await make_mailchimp_request("PATCH", endpoint, json_data=payload) + + result = { + "id": member_data.get("id"), + "email_address": member_data.get("email_address"), + "status": member_data.get("status"), + "merge_fields": member_data.get("merge_fields", {}), + "interests": member_data.get("interests", {}), + "stats": member_data.get("stats", {}), + "member_rating": member_data.get("member_rating"), + "last_changed": member_data.get("last_changed"), + "language": member_data.get("language"), + "vip": member_data.get("vip"), + "location": member_data.get("location", {}), + "tags_count": member_data.get("tags_count"), + "tags": member_data.get("tags", []), + "list_id": list_id, + "updated_fields": [k for k in payload.keys() if k != "email_address"] + } + + return result + except Exception as e: + logger.exception(f"Error executing tool update_member: {e}") + return { + "error": "Failed to update member", + "list_id": list_id, + "email_address": email_address, + "exception": str(e) + } + +async def delete_member(list_id: str, email_address: str) -> Dict[str, Any]: + """Delete/permanently remove a member from an audience.""" + logger.info(f"Executing tool: delete_member with list_id: {list_id}, email: {email_address}") + try: + subscriber_hash = _get_subscriber_hash(email_address) + endpoint = f"/lists/{list_id}/members/{subscriber_hash}" + + await make_mailchimp_request("DELETE", endpoint, expect_empty_response=True) + + result = { + "status": "success", + "message": f"Member {email_address} has been permanently deleted from audience {list_id}", + "list_id": list_id, + "email_address": email_address, + "subscriber_hash": subscriber_hash, + "warning": "This action is permanent. Consider using update_member with status 'unsubscribed' instead." + } + + return result + except Exception as e: + logger.exception(f"Error executing tool delete_member: {e}") + return { + "error": "Failed to delete member", + "list_id": list_id, + "email_address": email_address, + "exception": str(e) + } + +async def add_member_tags(list_id: str, email_address: str, tags: List[str]) -> Dict[str, Any]: + """Add tags to a specific audience member.""" + logger.info(f"Executing tool: add_member_tags with list_id: {list_id}, email: {email_address}, tags: {tags}") + try: + subscriber_hash = _get_subscriber_hash(email_address) + endpoint = f"/lists/{list_id}/members/{subscriber_hash}/tags" + + payload = { + "tags": [{"name": tag, "status": "active"} for tag in tags] + } + + await make_mailchimp_request("POST", endpoint, json_data=payload, expect_empty_response=True) + + result = { + "status": "success", + "message": f"Tags added to member {email_address}", + "list_id": list_id, + "email_address": email_address, + "tags_added": tags, + "tags_count": len(tags) + } + + return result + except Exception as e: + logger.exception(f"Error executing tool add_member_tags: {e}") + return { + "error": "Failed to add tags to member", + "list_id": list_id, + "email_address": email_address, + "tags": tags, + "exception": str(e) + } + +async def remove_member_tags(list_id: str, email_address: str, tags: List[str]) -> Dict[str, Any]: + """Remove tags from a specific audience member.""" + logger.info(f"Executing tool: remove_member_tags with list_id: {list_id}, email: {email_address}, tags: {tags}") + try: + subscriber_hash = _get_subscriber_hash(email_address) + endpoint = f"/lists/{list_id}/members/{subscriber_hash}/tags" + + payload = { + "tags": [{"name": tag, "status": "inactive"} for tag in tags] + } + + await make_mailchimp_request("POST", endpoint, json_data=payload, expect_empty_response=True) + + result = { + "status": "success", + "message": f"Tags removed from member {email_address}", + "list_id": list_id, + "email_address": email_address, + "tags_removed": tags, + "tags_count": len(tags) + } + + return result + except Exception as e: + logger.exception(f"Error executing tool remove_member_tags: {e}") + return { + "error": "Failed to remove tags from member", + "list_id": list_id, + "email_address": email_address, + "tags": tags, + "exception": str(e) + } + +async def get_member_activity(list_id: str, email_address: str, count: int = 10) -> Dict[str, Any]: + """Get the last 50 events of a member's activity on a specific list.""" + logger.info(f"Executing tool: get_member_activity with list_id: {list_id}, email: {email_address}") + try: + subscriber_hash = _get_subscriber_hash(email_address) + endpoint = f"/lists/{list_id}/members/{subscriber_hash}/activity" + + params = {"count": count} + + activity_data = await make_mailchimp_request("GET", endpoint, params=params) + + result = { + "email_id": activity_data.get("email_id"), + "list_id": activity_data.get("list_id"), + "list_is_active": activity_data.get("list_is_active"), + "contact_status": activity_data.get("contact_status"), + "email_address": email_address, + "total_items": activity_data.get("total_items"), + "activity": [] + } + + for activity in activity_data.get("activity", []): + activity_info = { + "action": activity.get("action"), + "timestamp": activity.get("timestamp"), + "url": activity.get("url"), + "type": activity.get("type"), + "campaign_id": activity.get("campaign_id"), + "title": activity.get("title"), + "parent_campaign": activity.get("parent_campaign") + } + result["activity"].append(activity_info) + + return result + except Exception as e: + logger.exception(f"Error executing tool get_member_activity: {e}") + return { + "error": "Failed to retrieve member activity", + "list_id": list_id, + "email_address": email_address, + "exception": str(e) + } \ No newline at end of file diff --git a/mcp_servers/markitdown/README.md b/mcp_servers/markitdown/README.md index 39ca68ae..418e1e8e 100644 --- a/mcp_servers/markitdown/README.md +++ b/mcp_servers/markitdown/README.md @@ -1,74 +1,73 @@ # MarkItDown MCP Server -[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) - -This project provides a Model Context Protocol (MCP) server that utilizes the `markitdown` library to convert various document types hosted at a given URL into Markdown format. - -## Features - -* Accepts a URL (HTTP/HTTPS) pointing to a document. -* Supports conversion for: - * PDF - * Microsoft Office Documents (Word, PowerPoint, Excel) - * HTML - * Text-based formats (CSV, JSON, XML) - * ZIP archives (extracts and converts contents) - * EPub files -* Handles optional authentication tokens for accessing protected resources. - -## Running Locally - -There are two recommended ways to run this server locally: using Docker (recommended) or using a Python virtual environment. - -### Using Docker (Recommended) - -This is the easiest way to get the server running without managing dependencies manually. - -1. **Ensure Docker is installed** on your system. -2. **Navigate to the root directory** of the `klavis` project in your terminal. -3. **Build the Docker image:** - ```bash - docker build -t markitdown-mcp -f mcp_servers/markitdown/Dockerfile . - ``` - *(You can replace `markitdown-mcp` with any tag you prefer)* -4. **Run the Docker container:** - ```bash - docker run -p 5000:5000 markitdown-mcp - ``` - This command maps port 5000 on your host machine to port 5000 inside the container, where the server listens. - -The server should now be running and accessible at `http://localhost:5000`. - -### Using Python Virtual Environment - -This method requires Python 3.12 and `pip` to be installed. - -1. **Navigate to this directory** (`mcp_servers/markitdown`) in your terminal. -2. **Create a Python virtual environment:** - ```bash - python3 -m venv .venv - ``` -3. **Activate the virtual environment:** - * On macOS/Linux: - ```bash - source .venv/bin/activate - ``` - * On Windows: - ```bash - .\.venv\Scripts\activate - ``` -4. **Install the required dependencies:** - ```bash - pip install -r requirements.txt - ``` -5. **Run the server:** - ```bash - python server.py - ``` - -The server should now be running and accessible at `http://localhost:5000`. - -6. **Deactivate the virtual environment** when you are finished: - ```bash - deactivate - ``` \ No newline at end of file +A Model Context Protocol (MCP) server for MarkItDown integration. Convert documents and content to Markdown format using MarkItDown's conversion capabilities. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to MarkItDown with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("MARKITDOWN", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/markitdown-mcp-server:latest + + +# Run MarkItDown MCP Server (no authentication required) +docker run -p 5000:5000 \ + ghcr.io/klavis-ai/markitdown-mcp-server:latest +``` + +**No Authentication:** MarkItDown conversion typically requires no external authentication. + +## šŸ› ļø Available Tools + +- **Document Conversion**: Convert various document formats to Markdown +- **HTML to Markdown**: Convert HTML content to clean Markdown +- **File Processing**: Process multiple document types and formats +- **Content Cleaning**: Clean and optimize converted Markdown output +- **Batch Processing**: Convert multiple documents efficiently + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/markitdown/requirements.txt b/mcp_servers/markitdown/requirements.txt index 03eb3370..4612340c 100644 --- a/mcp_servers/markitdown/requirements.txt +++ b/mcp_servers/markitdown/requirements.txt @@ -1,4 +1,4 @@ -mcp>=1.6.0 +mcp==1.11.0 markitdown[all] pydantic fastapi diff --git a/mcp_servers/markitdown/server.py b/mcp_servers/markitdown/server.py index 6d49a4f8..80a46c74 100644 --- a/mcp_servers/markitdown/server.py +++ b/mcp_servers/markitdown/server.py @@ -1,42 +1,44 @@ -from mcp.server.fastmcp import FastMCP -from markitdown import MarkItDown -import requests +import os +import logging +import contextlib import tempfile +from collections.abc import AsyncIterator from typing import Annotated + +import click +import requests +from markitdown import MarkItDown +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send from pydantic import Field -mcp = FastMCP( - "FileReader", - instructions="A file reader and converter that can read files from urls and convert them to markdown." - "It currently supports: PDF, PowerPoint, Word, Excel, HTML, Text-based formats (CSV, JSON, XML), ZIP files (iterates over contents), EPubs.", - port=5000, -) +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("markitdown-mcp-server") + +# Default port configuration +MARKITDOWN_MCP_SERVER_PORT = int(os.getenv("MARKITDOWN_MCP_SERVER_PORT", "5000")) -@mcp.tool() -async def convert_document_to_markdown( - uri: Annotated[ - str, - Field( - description="The URI of the resource to convert to markdown. The resource MUST be one of the supported types: PDF, " - "PowerPoint, Word, Excel, HTML, Text-based formats (CSV, JSON, XML), ZIP files (iterates over contents), EPubs." - ), - ], - auth_token: Annotated[ - str, Field(description="The optional authentication token for the resource.") - ], -) -> str: +async def convert_document_to_markdown(uri: str) -> str: """Convert a resource described by an http:, https: to markdown. + Args: + uri: The URI of the resource to convert to markdown + Returns: The markdown representation of the resource. """ if not uri.startswith("http") and not uri.startswith("https"): return f"Unsupported uri. Only http:, https: are supported." - response = requests.get( - uri, headers={"Authorization": f"Bearer {auth_token}"} if auth_token else None - ) + response = requests.get(uri) if response.status_code == 200: # Save the PDF to a temporary file with tempfile.NamedTemporaryFile( @@ -48,8 +50,159 @@ async def convert_document_to_markdown( return f"Failed to download the resource. Status code: {response.status_code}" -def main(): - mcp.run(transport="sse") +@click.command() +@click.option("--port", default=MARKITDOWN_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server( + "markitdown-mcp-server", + instructions="A file reader and converter that can read files from urls and convert them to markdown." + "It currently supports: PDF, PowerPoint, Word, Excel, HTML, Text-based formats (CSV, JSON, XML), ZIP files (iterates over contents), EPubs.", + ) + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="convert_document_to_markdown", + description="Convert a resource described by an http:, https: to markdown.", + inputSchema={ + "type": "object", + "required": ["uri"], + "properties": { + "uri": { + "type": "string", + "description": "The URI of the resource to convert to markdown. The resource MUST be one of the supported types: PDF, " + "PowerPoint, Word, Excel, HTML, Text-based formats (CSV, JSON, XML), ZIP files (iterates over contents), EPubs." + } + }, + }, + annotations=types.ToolAnnotations(**{"category": "MARKITDOWN_CONVERT", "readOnlyHint": True}), + ) + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + ctx = app.request_context + + if name == "convert_document_to_markdown": + uri = arguments.get("uri") + + if not uri: + return [ + types.TextContent( + type="text", + text="Error: URI parameter is required", + ) + ] + + try: + result = await convert_document_to_markdown(uri) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + await session_manager.handle_request(scope, receive, send) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 if __name__ == "__main__": diff --git a/mcp_servers/mem0/.env.example b/mcp_servers/mem0/.env.example new file mode 100644 index 00000000..1a3e37d0 --- /dev/null +++ b/mcp_servers/mem0/.env.example @@ -0,0 +1 @@ +MEM0_API_KEY= \ No newline at end of file diff --git a/mcp_servers/mem0/Dockerfile b/mcp_servers/mem0/Dockerfile new file mode 100644 index 00000000..8713c9cf --- /dev/null +++ b/mcp_servers/mem0/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/mem0/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/mem0/server.py . +COPY mcp_servers/mem0/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/mem0/README.md b/mcp_servers/mem0/README.md new file mode 100644 index 00000000..ccde27ad --- /dev/null +++ b/mcp_servers/mem0/README.md @@ -0,0 +1,73 @@ +# Mem0 MCP Server + +A Model Context Protocol (MCP) server for Mem0 integration. Manage AI memory and context storage using Mem0's memory management API. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Mem0 with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("MEM0", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/mem0-mcp-server:latest + + +# Run Mem0 MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/mem0-mcp-server:latest +``` + +**API Key Setup:** Get your Mem0 API key from the [Mem0 Dashboard](https://app.mem0.ai/). + +## šŸ› ļø Available Tools + +- **Memory Management**: Store and retrieve AI conversation memories +- **Context Storage**: Manage long-term context and user preferences +- **Memory Search**: Search through stored memories and contexts +- **User Profiles**: Maintain user-specific memory profiles +- **Memory Analytics**: Analyze memory usage and patterns + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/mem0/requirements.txt b/mcp_servers/mem0/requirements.txt new file mode 100644 index 00000000..db7be90a --- /dev/null +++ b/mcp_servers/mem0/requirements.txt @@ -0,0 +1,10 @@ +mcp==1.11.0 +fastapi +uvicorn[standard] +click>=8.0.0 +pydantic>=2.5.0 +httpx>=0.27.0 +python-dotenv>=1.0.0 +typing-extensions +starlette>=0.27.0 +mem0ai>=0.1.115 \ No newline at end of file diff --git a/mcp_servers/mem0/server.py b/mcp_servers/mem0/server.py new file mode 100644 index 00000000..f57567f7 --- /dev/null +++ b/mcp_servers/mem0/server.py @@ -0,0 +1,438 @@ +import os +import base64 +import logging +import contextlib +import json +from collections.abc import AsyncIterator + +import click +from dotenv import load_dotenv +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +from tools import ( + mem0_api_key_context, + add_memory, + get_all_memories, + search_memories, + update_memory, + delete_memory, +) + +load_dotenv() + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("mem0-mcp-server") + +MEM0_MCP_SERVER_PORT = int(os.getenv("MEM0_MCP_SERVER_PORT", "5000")) + +def extract_api_key(request_or_scope) -> str: + """Extract API key from headers or environment.""" + api_key = os.getenv("API_KEY") + + if not api_key: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + else: + auth_data = None + + if auth_data: + try: + # Parse the JSON auth data to extract token + auth_json = json.loads(auth_data) + api_key = auth_json.get('token') or auth_json.get('api_key') or '' + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + api_key = "" + + return api_key or "" + +@click.command() +@click.option("--port", default=MEM0_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("mem0-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="mem0_add_memory", + description="Add a new memory to mem0 for long-term storage. This tool stores code snippets, implementation details, and programming knowledge for future reference. When storing information, you should include: complete code with all necessary imports and dependencies, language/framework version information, full implementation context and any required setup/configuration, detailed comments explaining the logic, example usage or test cases, any known limitations or performance considerations, related patterns or alternative approaches, links to relevant documentation or resources, environment setup requirements, and error handling tips. The memory will be indexed for semantic search and can be retrieved later using natural language queries.", + inputSchema={ + "type": "object", + "required": ["content"], + "properties": { + "content": { + "type": "string", + "description": "The content to store in memory, including code, documentation, and context." + }, + "user_id": { + "type": "string", + "description": "Optional user ID. If not provided, uses the default user ID." + } + } + }, + annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY"}) + ), + types.Tool( + name="mem0_get_all_memories", + description="Retrieve all stored memories for the user. Call this tool when you need complete context of all previously stored information. This is useful when you need to analyze all available code patterns and knowledge, check all stored implementation examples, review the full history of stored solutions, or ensure no relevant information is missed. Returns a comprehensive list of code snippets and implementation patterns, programming knowledge and best practices, technical documentation and examples, and setup and configuration guides. Results are returned in JSON format with metadata.", + inputSchema={ + "type": "object", + "properties": { + "user_id": { + "type": "string", + "description": "Optional user ID. If not provided, uses the default user ID." + }, + "page": { + "type": "integer", + "description": "Page number for pagination (default: 1).", + "default": 1 + }, + "page_size": { + "type": "integer", + "description": "Number of memories per page (default: 50).", + "default": 50 + } + } + }, + annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY", "readOnlyHint": True}) + ), + types.Tool( + name="mem0_search_memories", + description="Search through stored memories using semantic search. This tool should be called for user queries to find relevant code and implementation details. It helps find specific code implementations or patterns, solutions to programming problems, best practices and coding standards, setup and configuration guides, and technical documentation and examples. The search uses natural language understanding to find relevant matches, so you can describe what you're looking for in plain English. Search the memories to leverage existing knowledge before providing answers.", + inputSchema={ + "type": "object", + "required": ["query"], + "properties": { + "query": { + "type": "string", + "description": "Search query string describing what you're looking for. Can be natural language or specific technical terms." + }, + "user_id": { + "type": "string", + "description": "Optional user ID. If not provided, uses the default user ID." + }, + "limit": { + "type": "integer", + "description": "Maximum number of results to return (default: 20).", + "default": 20 + } + } + }, + annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY", "readOnlyHint": True}) + ), + types.Tool( + name="mem0_update_memory", + description="Update an existing memory with new data. This tool allows you to modify the content of a previously stored memory while maintaining its unique identifier. Use this when you need to correct, enhance, or completely replace the content of an existing memory entry.", + inputSchema={ + "type": "object", + "required": ["memory_id", "data"], + "properties": { + "memory_id": { + "type": "string", + "description": "The unique identifier of the memory to update." + }, + "data": { + "type": "string", + "description": "The new content to replace the existing memory data." + }, + "user_id": { + "type": "string", + "description": "Optional user ID. If not provided, uses the default user ID." + } + } + }, + annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY"}) + ), + types.Tool( + name="mem0_delete_memory", + description="Delete a specific memory by ID or delete all memories for a user. This tool provides options to remove individual memories or clear all stored memories for a user. Use with caution as deleted memories cannot be recovered.", + inputSchema={ + "type": "object", + "properties": { + "memory_id": { + "type": "string", + "description": "The unique identifier of the memory to delete. Required if delete_all is false." + }, + "user_id": { + "type": "string", + "description": "Optional user ID. If not provided, uses the default user ID." + }, + "delete_all": { + "type": "boolean", + "description": "If true, deletes all memories for the user. If false, deletes specific memory by ID.", + "default": False + } + } + }, + annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY"}) + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + if name == "mem0_add_memory": + content = arguments.get("content") + user_id = arguments.get("user_id") + if not content: + return [ + types.TextContent( + type="text", + text="Error: content parameter is required", + ) + ] + try: + result = await add_memory(content, user_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "mem0_get_all_memories": + user_id = arguments.get("user_id") + page = arguments.get("page", 1) + page_size = arguments.get("page_size", 50) + try: + result = await get_all_memories(user_id, page, page_size) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "mem0_search_memories": + query = arguments.get("query") + user_id = arguments.get("user_id") + limit = arguments.get("limit", 20) + if not query: + return [ + types.TextContent( + type="text", + text="Error: query parameter is required", + ) + ] + try: + result = await search_memories(query, user_id, limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "mem0_update_memory": + memory_id = arguments.get("memory_id") + data = arguments.get("data") + user_id = arguments.get("user_id") + if not memory_id or not data: + return [ + types.TextContent( + type="text", + text="Error: memory_id and data parameters are required", + ) + ] + try: + result = await update_memory(memory_id, data, user_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "mem0_delete_memory": + memory_id = arguments.get("memory_id") + user_id = arguments.get("user_id") + delete_all = arguments.get("delete_all", False) + if not delete_all and not memory_id: + return [ + types.TextContent( + type="text", + text="Error: memory_id parameter is required when delete_all is false", + ) + ] + try: + result = await delete_memory(memory_id, user_id, delete_all) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + else: + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract API key from headers + api_key = extract_api_key(request) + + # Set the API key in context for this request + token = mem0_api_key_context.set(api_key or "") + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + mem0_api_key_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract API key from headers + api_key = extract_api_key(scope) + + # Set the API key in context for this request + token = mem0_api_key_context.set(api_key or "") + try: + await session_manager.handle_request(scope, receive, send) + finally: + mem0_api_key_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() diff --git a/mcp_servers/mem0/tools/__init__.py b/mcp_servers/mem0/tools/__init__.py new file mode 100644 index 00000000..a19ba425 --- /dev/null +++ b/mcp_servers/mem0/tools/__init__.py @@ -0,0 +1,24 @@ +# mem0 MCP Server Tools +# This package contains all the tool implementations organized by functionality + +from .memories import ( + add_memory, + get_all_memories, + search_memories, + update_memory, + delete_memory +) +from .base import get_user_id, mem0_api_key_context + +__all__ = [ + # Memories + "add_memory", + "get_all_memories", + "search_memories", + "update_memory", + "delete_memory", + + # Base + "get_user_id", + "mem0_api_key_context", +] diff --git a/mcp_servers/mem0/tools/base.py b/mcp_servers/mem0/tools/base.py new file mode 100644 index 00000000..38709c9f --- /dev/null +++ b/mcp_servers/mem0/tools/base.py @@ -0,0 +1,49 @@ +import logging +import os +from contextvars import ContextVar +from mem0 import MemoryClient +from dotenv import load_dotenv + +logger = logging.getLogger(__name__) + +load_dotenv() + +DEFAULT_USER_ID = os.getenv("DEFAULT_USER_ID", "mem0_mcp") +CUSTOM_INSTRUCTIONS = """ +Extract the Following Information: + +- Code Snippets: Save the actual code for future reference and analysis. +- Explanation: Document a clear description of what the code does, its purpose, and implementation details. +- Technical Context: Include information about programming languages, frameworks, libraries, dependencies, and system requirements. +- Key Features: Highlight main functionalities, important methods, design patterns, and notable implementation aspects. +- Usage Context: Document how and when the code should be used, including any prerequisites or constraints. +""" + +mem0_api_key_context: ContextVar[str] = ContextVar('mem0_api_key') + +def get_mem0_api_key() -> str: + """Get the mem0 API key from context or environment.""" + try: + return mem0_api_key_context.get() + except LookupError: + api_key = os.getenv("MEM0_API_KEY") + if not api_key: + raise RuntimeError("mem0 API key not found in request context or environment") + return api_key + +def get_user_id() -> str: + """Get the current user identifier for memory operations.""" + logger.debug(f"DEFAULT_USER_ID: {DEFAULT_USER_ID}") + return DEFAULT_USER_ID + +def get_mem0_client() -> MemoryClient: + """Get a configured mem0 client with current API key from context.""" + try: + api_key = get_mem0_api_key() + client = MemoryClient(api_key=api_key) + client.update_project(custom_instructions=CUSTOM_INSTRUCTIONS) + logger.debug("mem0 client initialized successfully") + return client + except Exception as e: + logger.error(f"Failed to initialize mem0 client: {e}") + raise diff --git a/mcp_servers/mem0/tools/memories.py b/mcp_servers/mem0/tools/memories.py new file mode 100644 index 00000000..c0d34f85 --- /dev/null +++ b/mcp_servers/mem0/tools/memories.py @@ -0,0 +1,141 @@ +import logging +from typing import Any, Dict +from .base import get_mem0_client, get_user_id + +# Configure logging +logger = logging.getLogger(__name__) + +async def add_memory(content: str, user_id: str = None) -> Dict[str, Any]: + """Add a new memory to mem0.""" + if not user_id: + user_id = get_user_id() + + logger.info(f"Adding memory for user: {user_id}") + try: + mem0_client = get_mem0_client() + messages = [{"role": "user", "content": content}] + result = mem0_client.add(messages, user_id=user_id, output_format="v1.1") + logger.info(f"Successfully added memory for user {user_id}") + return { + "success": True, + "message": f"Successfully added memory: {content[:100]}{'...' if len(content) > 100 else ''}", + "user_id": user_id + } + + except Exception as e: + logger.exception(f"Error adding memory: {e}") + raise e + +async def get_all_memories(user_id: str = None, page: int = 1, page_size: int = 50) -> Dict[str, Any]: + """Get all memories for a user.""" + if not user_id: + user_id = get_user_id() + + logger.info(f"Getting all memories for user: {user_id}") + try: + mem0_client = get_mem0_client() + memories = mem0_client.get_all(user_id=user_id, page=page, page_size=page_size) + formatted_memories = [] + for memory in memories["results"]: + formatted_memories.append({ + "id": memory["id"], + "memory": memory["memory"], + "created_at": memory.get("created_at"), + "updated_at": memory.get("updated_at") + }) + + logger.info(f"Retrieved {len(formatted_memories)} memories for user {user_id}") + return { + "success": True, + "memories": formatted_memories, + "user_id": user_id, + "total_results": len(formatted_memories), + "page": page, + "page_size": page_size + } + except Exception as e: + logger.exception(f"Error getting memories: {e}") + raise e + +async def search_memories(query: str, user_id: str = None, limit: int = 20) -> Dict[str, Any]: + """Search memories using semantic search.""" + if not user_id: + user_id = get_user_id() + + logger.info(f"Searching memories for user {user_id} with query: {query}") + try: + mem0_client = get_mem0_client() + memories = mem0_client.search(query, user_id=user_id, output_format="v1.1") + + formatted_memories = [] + for memory in memories["results"][:limit]: + formatted_memories.append({ + "id": memory["id"], + "memory": memory["memory"], + "score": memory.get("score", 0), + "created_at": memory.get("created_at"), + "updated_at": memory.get("updated_at") + }) + + logger.info(f"Found {len(formatted_memories)} matching memories for user {user_id}") + return { + "success": True, + "memories": formatted_memories, + "user_id": user_id, + "query": query, + "total_results": len(formatted_memories) + } + except Exception as e: + logger.exception(f"Error searching memories: {e}") + raise e + +async def update_memory(memory_id: str, data: str, user_id: str = None) -> Dict[str, Any]: + """Update an existing memory.""" + if not user_id: + user_id = get_user_id() + + logger.info(f"Updating memory {memory_id} for user: {user_id}") + try: + mem0_client = get_mem0_client() + result = mem0_client.update(memory_id=memory_id, text=data) + logger.info(f"Successfully updated memory {memory_id} for user {user_id}") + return { + "success": True, + "message": f"Successfully updated memory: {memory_id}", + "user_id": user_id, + "memory_id": memory_id, + "result": result + } + + except Exception as e: + logger.exception(f"Error updating memory: {e}") + raise e + +async def delete_memory(memory_id: str = None, user_id: str = None, delete_all: bool = False) -> Dict[str, Any]: + """Delete a specific memory or all memories for a user.""" + if not user_id: + user_id = get_user_id() + + logger.info(f"Deleting memory for user: {user_id}") + try: + mem0_client = get_mem0_client() + if delete_all: + result = mem0_client.delete_all(user_id=user_id) + logger.info(f"Successfully deleted all memories for user {user_id}") + return { + "success": True, + "message": f"Successfully deleted all memories for user: {user_id}", + "user_id": user_id + } + else: + result = mem0_client.delete(memory_id=memory_id) + logger.info(f"Successfully deleted memory {memory_id}") + return { + "success": True, + "message": f"Successfully deleted memory: {memory_id}", + "memory_id": memory_id + } + + except Exception as e: + logger.exception(f"Error deleting memory: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/mixpanel/.env.example b/mcp_servers/mixpanel/.env.example new file mode 100644 index 00000000..56ec3cb6 --- /dev/null +++ b/mcp_servers/mixpanel/.env.example @@ -0,0 +1,6 @@ +# For service account authentication (recommended for server-side) +MIXPANEL_SERVICE_ACCOUNT_USERNAME=your_service_account_username +MIXPANEL_SERVICE_ACCOUNT_SECRET=your_service_account_secret + +# Port for the MCP server to listen on +MIXPANEL_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/mixpanel/Dockerfile b/mcp_servers/mixpanel/Dockerfile new file mode 100644 index 00000000..90deed9a --- /dev/null +++ b/mcp_servers/mixpanel/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/mixpanel/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/mixpanel/server.py . +COPY mcp_servers/mixpanel/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/mixpanel/README.md b/mcp_servers/mixpanel/README.md new file mode 100644 index 00000000..f7ea59d4 --- /dev/null +++ b/mcp_servers/mixpanel/README.md @@ -0,0 +1,73 @@ +# Mixpanel MCP Server + +A Model Context Protocol (MCP) server for Mixpanel integration. Analyze user events, track metrics, and manage analytics using Mixpanel's API. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Mixpanel with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("MIXPANEL", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/mixpanel-mcp-server:latest + + +# Run Mixpanel MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/mixpanel-mcp-server:latest +``` + +**API Secret Setup:** Get your Mixpanel API secret from your [Mixpanel project settings](https://mixpanel.com/settings/project). + +## šŸ› ļø Available Tools + +- **Event Analytics**: Track and analyze user events and behaviors +- **User Profiles**: Manage user properties and segmentation +- **Funnel Analysis**: Create and analyze conversion funnels +- **Cohort Analysis**: Track user retention and engagement +- **Report Generation**: Generate custom analytics reports + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/mixpanel/requirements.txt b/mcp_servers/mixpanel/requirements.txt new file mode 100644 index 00000000..81aab075 --- /dev/null +++ b/mcp_servers/mixpanel/requirements.txt @@ -0,0 +1,9 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +httpx +click +starlette \ No newline at end of file diff --git a/mcp_servers/mixpanel/server.py b/mcp_servers/mixpanel/server.py new file mode 100644 index 00000000..91b1887c --- /dev/null +++ b/mcp_servers/mixpanel/server.py @@ -0,0 +1,879 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + username_context, + secret_context, + send_events, + get_projects, + get_events, + get_event_properties, + get_event_property_values, + run_funnels_query, + run_frequency_query, + run_retention_query, + run_segmentation_query, +) + +logger = logging.getLogger(__name__) + +load_dotenv() + +MIXPANEL_MCP_SERVER_PORT = int(os.getenv("MIXPANEL_MCP_SERVER_PORT", "5000")) + +def extract_credentials(request_or_scope) -> Dict[str, str]: + """Extract service account credentials from x-auth-data header.""" + username = os.getenv("MIXPANEL_SERVICE_ACCOUNT_USERNAME") + secret = os.getenv("MIXPANEL_SERVICE_ACCOUNT_SECRET") + + auth_data = None + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data_header = request_or_scope.headers.get(b'x-auth-data') + if auth_data_header: + auth_data = base64.b64decode(auth_data_header).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data_header = headers.get(b'x-auth-data') + if auth_data_header: + auth_data = base64.b64decode(auth_data_header).decode('utf-8') + + # If no credentials from environment, try to parse from auth_data (from prod) + if auth_data and (not username or not secret): + try: + # Parse the JSON auth data to extract credentials + auth_json = json.loads(auth_data) + username = auth_json.get('serviceaccount_username', '') or username + secret = auth_json.get('serviceaccount_secret', '') or secret + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + + return { + 'username': username or "", + 'secret': secret or "", + } + +@click.command() +@click.option("--port", default=MIXPANEL_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("mixpanel-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="mixpanel_send_events", + description="Send events to Mixpanel using the /import endpoint with Service Account authentication. Send batches of events with automatic deduplication support. Each event requires time, distinct_id, and $insert_id for proper processing.", + inputSchema={ + "type": "object", + "required": ["project_id", "events"], + "properties": { + "project_id": { + "type": "string", + "description": "The Mixpanel project ID to import events to. Use get_projects tool to find available project IDs.", + }, + "events": { + "type": "array", + "description": "Array of event objects to import", + "items": { + "type": "object", + "required": ["event"], + "properties": { + "event": { + "type": "string", + "description": "The name of the event (e.g., 'Page View', 'Button Click', 'Purchase')", + }, + "properties": { + "type": "object", + "description": "Event properties. Will auto-generate time and $insert_id if not provided.", + "additionalProperties": True, + }, + "distinct_id": { + "type": "string", + "description": "Unique identifier for the user performing the event (empty string if anonymous)", + }, + }, + }, + "minItems": 1, + "maxItems": 2000, + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MIXPANEL_EVENT"}), + ), + types.Tool( + name="mixpanel_get_events", + description="Get event names for the given Mixpanel project. Retrieves all event names that have been tracked in the specified project, useful for discovering what events are available for analysis.", + inputSchema={ + "type": "object", + "required": ["project_id"], + "properties": { + "project_id": { + "type": "string", + "description": "The Mixpanel project ID to get event names for", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MIXPANEL_EVENT", "readOnlyHint": True}), + ), + types.Tool( + name="mixpanel_get_event_properties", + description=( + "Get event properties for the given event and Mixpanel project. " + "These are the properties available when filtering and aggregating in queries." + ), + inputSchema={ + "type": "object", + "required": ["project_id", "event"], + "properties": { + "project_id": { + "type": ["string", "integer"], + "description": "The Mixpanel project ID", + }, + "event": { + "type": "string", + "description": "Event name (e.g., 'AI Prompt Sent')", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MIXPANEL_EVENT_METADATA", "readOnlyHint": True}), + ), + types.Tool( + name="mixpanel_get_event_property_values", + description=( + "Get event property values for the given event name, Mixpanel project, and property name. " + "These are the values that are available for the given event property." + ), + inputSchema={ + "type": "object", + "required": ["project_id", "event", "property"], + "properties": { + "project_id": { + "type": ["string", "integer"], + "description": "The Mixpanel project ID", + }, + "event": { + "type": "string", + "description": "Event name (e.g., 'AI Prompt Sent')", + }, + "property": { + "type": "string", + "description": "Property name (e.g., 'utm_source')", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MIXPANEL_EVENT_METADATA", "readOnlyHint": True}), + ), + types.Tool( + name="mixpanel_run_funnels_query", + description=( + "Run a funnel query. This measures the conversion rate of a user journey." + ), + inputSchema={ + "type": "object", + "required": ["events", "from_date", "to_date", "count_type", "project_id"], + "properties": { + "events": { + "type": ["array", "string"], + "description": "Ordered steps of the funnel. Either an array of step objects or a JSON-encoded string.", + "items": { + "type": "object", + "properties": { + "event": {"type": "string"}, + "step_label": {"type": "string"} + }, + "required": ["event"], + "additionalProperties": True + } + }, + "from_date": {"type": "string", "description": "YYYY-MM-DD"}, + "to_date": {"type": "string", "description": "YYYY-MM-DD"}, + "count_type": {"type": "string", "enum": ["unique", "general"], "default": "unique"}, + "project_id": {"type": ["string", "integer"]} + } + }, + annotations=types.ToolAnnotations(**{"category": "MIXPANEL_ANALYTICS", "readOnlyHint": True}), + ), + types.Tool( + name="mixpanel_run_segmentation_query", + description=( + "The segmentation tool (run_segmentation_query) is a flexible way to slice and dice your event stream for deeper insights. " + "At a high level, it lets you: " + "• Select which events to analyze (e.g. a single event name or all events via \"$any_event\"). " + "• Specify a time window, defaulting to the last 30 days unless you override from_date/to_date. " + "• Bucket your results into time intervals (day, hour, or month) so you can see trends over time. " + "• Choose your metric: " + "general: raw event counts, i.e. how many times the event fired. " + "unique: unique user counts, i.e. how many distinct users triggered the event. " + "• Filter the data with arbitrary boolean expressions (where)—for example, only count purchases above $100 or sessions where plan == \"Pro\". " + "• Segment (group) the results by any property or computed key (on), from simple string or numeric properties to math‐ or typecast‐based buckets. " + "• Apply numerical aggregations when grouping on a numeric field: " + "sum: total up the values in each segment (e.g. total revenue per day). " + "average: compute the mean value in each segment (e.g. average session length by plan). " + "buckets: build a histogram of value ranges (e.g. count of purchases falling into each revenue bracket)." + ), + inputSchema={ + "type": "object", + "required": ["project_id", "event"], + "properties": { + "project_id": { + "type": ["string", "integer"], + "description": "The Mixpanel project ID", + }, + "event": { + "type": "string", + "description": "Event name to analyze.", + }, + "from_date": { + "type": "string", + "description": "Start date in YYYY-MM-DD. Defaults to 30 days ago.", + }, + "to_date": { + "type": "string", + "description": "End date in YYYY-MM-DD. Defaults to today.", + }, + "unit": { + "type": "string", + "enum": ["hour", "day", "month"], + "default": "day", + "description": "Granularity of time buckets.", + }, + "type": { + "type": "string", + "enum": ["general", "unique"], + "default": "general", + "description": "Metric for counts (raw vs distinct users).", + }, + "where": { + "type": "string", + "description": "Optional boolean expression filter", + }, + "on": { + "type": "string", + "description": "Optional segmentation property or computed key", + }, + "numerical_aggregation": { + "type": "string", + "enum": ["sum", "average", "buckets"], + "description": "Optional numeric aggregation when grouping by a numeric field.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MIXPANEL_ANALYTICS", "readOnlyHint": True}), + ), + types.Tool( + name="mixpanel_run_retention_query", + description=( + "The retention tool tracks user engagement over time and supports cohort analysis. " + "It lets you select a born event ('born_event') and a retention event ('event'), specify a time window, " + "choose retention type (birth or compounded), bucket by day/week/month, choose a metric (general or unique), " + "filter the data with boolean expressions ('where'), and segment results by any property ('on')." + ), + inputSchema={ + "type": "object", + "required": ["project_id", "event"], + "properties": { + "project_id": { + "type": ["string", "integer"], + "description": "The Mixpanel project ID", + }, + "event": { + "type": "string", + "description": "The retention event to analyze", + }, + "born_event": { + "type": "string", + "description": "The born/cohort event. Defaults to event if not provided", + }, + "from_date": { + "type": "string", + "description": "Start date in YYYY-MM-DD. Defaults to 30 days ago.", + }, + "to_date": { + "type": "string", + "description": "End date in YYYY-MM-DD. Defaults to today.", + }, + "unit": { + "type": "string", + "enum": ["day", "week", "month"], + "default": "day", + "description": "Granularity of cohorts/time buckets.", + }, + "retention_type": { + "type": "string", + "enum": ["birth", "compounded"], + "default": "birth", + "description": "Retention analysis type.", + }, + "interval_count": { + "type": "integer", + "description": "Number of intervals to include.", + }, + "metric": { + "type": "string", + "enum": ["general", "unique"], + "default": "unique", + "description": "Metric for counts (raw vs distinct users).", + }, + "where": { + "type": "string", + "description": "Optional boolean expression filter", + }, + "on": { + "type": "string", + "description": "Optional segmentation property or computed key", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MIXPANEL_ANALYTICS", "readOnlyHint": True}), + ), + types.Tool( + name="mixpanel_run_frequency_query", + description=( + "The frequency tool (run_frequency_query) is a powerful way to track user engagement over time and do cohort analysis. " + "It lets you:\n" + "• Select which events to analyze both for born event \"born_event\" param and retention event \"event\" param.\n" + "• Specify a time window, defaulting to the last 30 days unless user specifies otherwise.\n" + "• Choose your unit:\n" + " - day\n" + " - week\n" + " - month\n" + "• Choose your addiction_unit:\n" + " - hour\n" + " - day\n" + " - week\n" + "• Choose your metric:\n" + " - general: raw event counts, i.e. how many times the event fired.\n" + " - unique: unique user counts, i.e. how many distinct users triggered the event.\n" + "• Filter the data with arbitrary boolean expressions (where)—for example, only count purchases above $100 or sessions where properties[\"plan\"] == \"Pro\".\n" + "• Segment (group) the results by any property or computed key (on), from simple string or numeric properties to math‐ or typecast‐based buckets." + ), + inputSchema={ + "type": "object", + "required": ["project_id", "event"], + "properties": { + "project_id": { + "type": ["string", "integer"], + "description": "The Mixpanel project ID", + }, + "event": { + "type": "string", + "description": "The retention event to analyze", + }, + "born_event": { + "type": "string", + "description": "The born/cohort event. Defaults to event if not provided", + }, + "from_date": { + "type": "string", + "description": "Start date in YYYY-MM-DD. Defaults to 30 days ago.", + }, + "to_date": { + "type": "string", + "description": "End date in YYYY-MM-DD. Defaults to today.", + }, + "unit": { + "type": "string", + "enum": ["day", "week", "month"], + "default": "day", + "description": "Granularity of cohorts/time buckets.", + }, + "addiction_unit": { + "type": "string", + "enum": ["hour", "day", "week"], + "default": "hour", + "description": "Sub-interval used for frequency bins.", + }, + "metric": { + "type": "string", + "enum": ["general", "unique"], + "default": "unique", + "description": "Metric for counts (raw vs distinct users).", + }, + "where": { + "type": "string", + "description": "Optional boolean expression filter", + }, + "on": { + "type": "string", + "description": "Optional segmentation property or computed key", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MIXPANEL_ANALYTICS", "readOnlyHint": True}), + ), + types.Tool( + name="mixpanel_get_projects", + description="Get all projects that are accessible to the current service account user. Use this tool to discover available projects and their IDs, which are required for event tracking and user profile operations.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations(**{"category": "MIXPANEL_PROJECT", "readOnlyHint": True}), + ) + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + if name == "mixpanel_send_events": + project_id = arguments.get("project_id") + events = arguments.get("events") + + if not project_id: + return [ + types.TextContent( + type="text", + text="Error: project_id parameter is required", + ) + ] + + if not events: + return [ + types.TextContent( + type="text", + text="Error: events parameter is required", + ) + ] + + try: + result = await send_events(project_id, events) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "mixpanel_get_projects": + try: + result = await get_projects() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + + elif name == "mixpanel_get_event_properties": + project_id = arguments.get("project_id") + event = arguments.get("event") + + if not project_id: + return [types.TextContent(type="text", text="Error: project_id parameter is required")] + if not event: + return [types.TextContent(type="text", text="Error: event parameter is required")] + + try: + result = await get_event_properties(str(project_id), event) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "mixpanel_get_event_property_values": + project_id = arguments.get("project_id") + event = arguments.get("event") + property_name = arguments.get("property") + + if not project_id: + return [types.TextContent(type="text", text="Error: project_id parameter is required")] + if not event: + return [types.TextContent(type="text", text="Error: event parameter is required")] + if not property_name: + return [types.TextContent(type="text", text="Error: property parameter is required")] + + try: + result = await get_event_property_values(str(project_id), event, property_name) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "mixpanel_run_funnels_query": + project_id = arguments.get("project_id") + events = arguments.get("events") + from_date = arguments.get("from_date") + to_date = arguments.get("to_date") + count_type = arguments.get("count_type", "unique") + + if not project_id: + return [types.TextContent(type="text", text="Error: project_id parameter is required")] + if not events: + return [types.TextContent(type="text", text="Error: events parameter is required")] + if not from_date or not to_date: + return [types.TextContent(type="text", text="Error: from_date and to_date are required")] + + try: + result = await run_funnels_query( + project_id=str(project_id), + events=events, + from_date=from_date, + to_date=to_date, + count_type=count_type, + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + + elif name == "mixpanel_get_events": + project_id = arguments.get("project_id") + + if not project_id: + return [ + types.TextContent( + type="text", + text="Error: project_id parameter is required", + ) + ] + + try: + result = await get_events(project_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "mixpanel_run_frequency_query": + project_id = arguments.get("project_id") + event = arguments.get("event") + born_event = arguments.get("born_event") + from_date = arguments.get("from_date") + to_date = arguments.get("to_date") + unit = arguments.get("unit", "day") + addiction_unit = arguments.get("addiction_unit", "hour") + metric = arguments.get("metric", "unique") + where = arguments.get("where") + on = arguments.get("on") + + if not project_id: + return [ + types.TextContent(type="text", text="Error: project_id parameter is required") + ] + if not event: + return [ + types.TextContent(type="text", text="Error: event parameter is required") + ] + + try: + result = await run_frequency_query( + project_id=project_id, + event=event, + born_event=born_event, + from_date=from_date, + to_date=to_date, + unit=unit, + addiction_unit=addiction_unit, + metric=metric, + where=where, + on=on, + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "mixpanel_run_retention_query": + project_id = arguments.get("project_id") + event = arguments.get("event") + born_event = arguments.get("born_event") + from_date = arguments.get("from_date") + to_date = arguments.get("to_date") + unit = arguments.get("unit", "day") + retention_type = arguments.get("retention_type", "birth") + interval_count = arguments.get("interval_count") + metric = arguments.get("metric", "unique") + where = arguments.get("where") + on = arguments.get("on") + + if not project_id: + return [ + types.TextContent(type="text", text="Error: project_id parameter is required") + ] + if not event: + return [ + types.TextContent(type="text", text="Error: event parameter is required") + ] + + try: + result = await run_retention_query( + project_id=str(project_id), + event=event, + born_event=born_event, + from_date=from_date, + to_date=to_date, + unit=unit, + retention_type=retention_type, + interval_count=interval_count, + metric=metric, + where=where, + on=on, + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "mixpanel_run_segmentation_query": + project_id = arguments.get("project_id") + event = arguments.get("event") + from_date = arguments.get("from_date") + to_date = arguments.get("to_date") + unit = arguments.get("unit", "day") + metric_type = arguments.get("type", "general") + where = arguments.get("where") + on = arguments.get("on") + numerical_aggregation = arguments.get("numerical_aggregation") + + if not project_id: + return [ + types.TextContent(type="text", text="Error: project_id parameter is required") + ] + if not event: + return [ + types.TextContent(type="text", text="Error: event parameter is required") + ] + + try: + result = await run_segmentation_query( + project_id=str(project_id), + event=event, + from_date=from_date, + to_date=to_date, + unit=unit, + type=metric_type, + where=where, + on=on, + numerical_aggregation=numerical_aggregation, + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + else: + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract credentials from headers + credentials = extract_credentials(request) + + # Set the credentials in context for this request + username_token = username_context.set(credentials['username']) + secret_token = secret_context.set(credentials['secret']) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + username_context.reset(username_token) + secret_context.reset(secret_token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract credentials from headers + credentials = extract_credentials(scope) + + # Set the credentials in context for this request + username_token = username_context.set(credentials['username']) + secret_token = secret_context.set(credentials['secret']) + try: + await session_manager.handle_request(scope, receive, send) + finally: + username_context.reset(username_token) + secret_context.reset(secret_token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/mixpanel/tools/__init__.py b/mcp_servers/mixpanel/tools/__init__.py new file mode 100644 index 00000000..1bcaca97 --- /dev/null +++ b/mcp_servers/mixpanel/tools/__init__.py @@ -0,0 +1,36 @@ +from .events import send_events, get_events, get_event_properties, get_event_property_values +from .funnels import run_funnels_query +from .projects import get_projects +from .base import username_context, secret_context +from .frequency import run_frequency_query +from .retention import run_retention_query +from .segmentation import run_segmentation_query + +__all__ = [ + # Events + "send_events", + "get_events", + "get_event_properties", + "get_event_property_values", + + # Frequency + "run_frequency_query", + + # Retention + "run_retention_query", + + # Segmentation + "run_segmentation_query", + + # Funnels + "run_funnels_query", + + # Projects + "get_projects", + + # Base + "username_context", + "secret_context", +] + + \ No newline at end of file diff --git a/mcp_servers/mixpanel/tools/base.py b/mcp_servers/mixpanel/tools/base.py new file mode 100644 index 00000000..7c65be62 --- /dev/null +++ b/mcp_servers/mixpanel/tools/base.py @@ -0,0 +1,295 @@ +import logging +import json +import base64 +import os +from typing import Any, Dict, Optional, Tuple +from contextvars import ContextVar +import httpx + +logger = logging.getLogger(__name__) + +# Mixpanel API Endpoints (as per https://developer.mixpanel.com/reference/overview) +MIXPANEL_INGESTION_ENDPOINT = "https://api.mixpanel.com/" # Ingestion API +MIXPANEL_EXPORT_ENDPOINT = "https://data.mixpanel.com/api/2.0/export" # Raw Data Export API +MIXPANEL_QUERY_ENDPOINT = "https://mixpanel.com/api" # Query API +MIXPANEL_APP_ENDPOINT = "https://mixpanel.com/api/app" # App Management APIs (projects, GDPR, etc.) + +# Context variables to store the credentials for each request +username_context: ContextVar[str] = ContextVar('serviceaccount_username') +secret_context: ContextVar[str] = ContextVar('serviceaccount_secret') + +def get_service_account_credentials() -> Tuple[str, str]: + """Get the service account credentials from context or environment. + + Returns: + Tuple of (service_account_username, service_account_secret) + """ + # First try to get from context variables + try: + username = username_context.get() + secret = secret_context.get() + if username and secret: + return username, secret + except LookupError: + pass + + # Fall back to environment variables + username = os.getenv("MIXPANEL_SERVICE_ACCOUNT_USERNAME", "") + secret = os.getenv("MIXPANEL_SERVICE_ACCOUNT_SECRET", "") + + if not username or not secret: + raise RuntimeError( + "Service account credentials not found. Please provide them via x-auth-data header " + "with 'serviceaccount_username' and 'serviceaccount_secret' fields, " + "or set MIXPANEL_SERVICE_ACCOUNT_USERNAME and " + "MIXPANEL_SERVICE_ACCOUNT_SECRET environment variables." + ) + + return username, secret + +class MixpanelIngestionClient: + """Client for Mixpanel Ingestion API using Service Account authentication. + + Ingestion API (api.mixpanel.com): For sending events, user profiles, and group data to Mixpanel. + Supports /import for batch events and /engage for user profile updates. + """ + + @staticmethod + async def make_request( + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None, + project_id: Optional[str] = None + ) -> Dict[str, Any]: + """Make an HTTP request to Mixpanel Ingestion API using Service Account auth. + + The ingestion API uses Service Account authentication for /import endpoint. + + Args: + method: HTTP method + endpoint: API endpoint + data: Request body data + params: Query parameters + project_id: Project ID for the import operation (required) + """ + if not project_id: + raise ValueError("project_id is required for ingestion operations") + + # Get service account credentials + username, secret = get_service_account_credentials() + + headers = { + "Content-Type": "application/json", + "Accept": "text/plain" + } + + url = f"{MIXPANEL_INGESTION_ENDPOINT}{endpoint}" + + # Add project_id to params for /import endpoint + if params is None: + params = {} + params["project_id"] = project_id + + # Use Basic Auth with service account credentials + auth = httpx.BasicAuth(username, secret) + + async with httpx.AsyncClient(timeout=30.0) as client: + if method.upper() == "POST": + response = await client.post(url, auth=auth, headers=headers, json=data, params=params) + elif method.upper() == "GET": + response = await client.get(url, auth=auth, headers=headers, params=params) + else: + raise ValueError(f"Unsupported HTTP method for ingestion: {method}") + + response.raise_for_status() + + # Handle response based on content type + content_type = response.headers.get("content-type", "") + if "application/json" in content_type: + return response.json() + elif response.text == "1": + return {"success": True, "message": "Events imported successfully"} + elif response.text == "0": + return {"success": False, "error": "Event import failed"} + else: + # Try to parse as JSON for error details + try: + return response.json() + except ValueError: + return {"success": True, "message": response.text or "Import successful"} + +class MixpanelExportClient: + """Client for Mixpanel Raw Data Export API using Service Account authentication. + + Raw Data Export API (data.mixpanel.com/api/2.0/export): For exporting raw event data. + """ + + @staticmethod + async def make_request( + method: str, + endpoint: str = "", + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + """Make an HTTP request to Mixpanel Export API using Service Account authentication. + + Args: + method: HTTP method + endpoint: API endpoint (usually empty as the full path is in MIXPANEL_EXPORT_ENDPOINT) + data: Request body data + params: Query parameters + """ + # Get service account credentials + username, secret = get_service_account_credentials() + + # Use Basic Auth with service account credentials + auth = httpx.BasicAuth(username, secret) + + headers = { + "Content-Type": "application/json" + } + + # MIXPANEL_EXPORT_ENDPOINT already includes the full path /api/2.0/export + url = MIXPANEL_EXPORT_ENDPOINT + + async with httpx.AsyncClient(timeout=30.0) as client: + if method.upper() == "GET": + response = await client.get(url, auth=auth, headers=headers, params=params) + elif method.upper() == "POST": + response = await client.post(url, auth=auth, headers=headers, json=data) + else: + raise ValueError(f"Unsupported HTTP method for export: {method}") + + response.raise_for_status() + + # Handle different response types + content_type = response.headers.get("content-type", "") + + if "application/json" in content_type: + return response.json() + elif response.text: + # The export endpoint returns newline-delimited JSON + lines = response.text.strip().split('\n') + events = [] + for line in lines: + if line: + try: + events.append(json.loads(line)) + except ValueError: + continue + return {"events": events} + else: + return {"success": True} + +class MixpanelQueryClient: + """Client for Mixpanel Query API using Service Account authentication. + + Query API (mixpanel.com/api): For calculated data like Insights, Funnels, Retention. + """ + + @staticmethod + async def make_request( + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + """Make an HTTP request to Mixpanel Query API using Service Account authentication. + + Args: + method: HTTP method + endpoint: API endpoint + data: Request body data + params: Query parameters + """ + # Get service account credentials + username, secret = get_service_account_credentials() + + # Use Basic Auth with service account credentials + auth = httpx.BasicAuth(username, secret) + + headers = { + "Content-Type": "application/json" + } + + url = f"{MIXPANEL_QUERY_ENDPOINT}{endpoint}" + + async with httpx.AsyncClient(timeout=30.0) as client: + if method.upper() == "GET": + response = await client.get(url, auth=auth, headers=headers, params=params) + elif method.upper() == "POST": + response = await client.post(url, auth=auth, headers=headers, json=data) + else: + raise ValueError(f"Unsupported HTTP method for query: {method}") + + response.raise_for_status() + + # Handle response + content_type = response.headers.get("content-type", "") + + if "application/json" in content_type: + return response.json() + elif response.text: + try: + return response.json() + except ValueError: + return {"data": response.text} + else: + return {"success": True} + +class MixpanelAppAPIClient: + """Client for Mixpanel App Management API using Service Account authentication. + + App Management APIs (mixpanel.com/api/app): For project management, GDPR, schemas, etc. + """ + + @staticmethod + async def make_request( + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + """Make an HTTP request to Mixpanel App Management API using Service Account authentication. + + Args: + method: HTTP method + endpoint: API endpoint (e.g., "/me", "/projects/{id}") + data: Request body data + params: Query parameters + """ + # Get service account credentials + username, secret = get_service_account_credentials() + + # Use Basic Auth with service account credentials + auth = httpx.BasicAuth(username, secret) + + headers = { + "Content-Type": "application/json" + } + + url = f"{MIXPANEL_APP_ENDPOINT}{endpoint}" + + async with httpx.AsyncClient(timeout=30.0) as client: + if method.upper() == "GET": + response = await client.get(url, auth=auth, headers=headers, params=params) + elif method.upper() == "POST": + response = await client.post(url, auth=auth, headers=headers, json=data) + else: + raise ValueError(f"Unsupported HTTP method: {method}") + + response.raise_for_status() + + # Handle response + content_type = response.headers.get("content-type", "") + + if "application/json" in content_type: + return response.json() + elif response.text: + try: + return response.json() + except ValueError: + return {"data": response.text} + else: + return {"success": True} \ No newline at end of file diff --git a/mcp_servers/mixpanel/tools/events.py b/mcp_servers/mixpanel/tools/events.py new file mode 100644 index 00000000..7a8bd606 --- /dev/null +++ b/mcp_servers/mixpanel/tools/events.py @@ -0,0 +1,494 @@ +import logging +import json +from typing import Any, Dict, Optional, List +import time +import uuid + +from .base import ( + MixpanelIngestionClient, + MixpanelExportClient, + MixpanelQueryClient +) + +logger = logging.getLogger(__name__) + +async def send_events( + project_id: str, + events: List[Dict[str, Any]] +) -> Dict[str, Any]: + """Send events to Mixpanel using the /import endpoint with Service Account authentication. + Use this API to send batches of events from your servers to Mixpanel. + + Args: + project_id: The Mixpanel project ID to import events to + events: List of event objects to import + + Returns: + Dict with import results + """ + try: + if not events or not isinstance(events, list): + raise ValueError("Events must be a non-empty list") + + if not project_id: + raise ValueError("project_id is required") + + # Prepare batch event data for /import endpoint + batch_events = [] + for i, event_data in enumerate(events): + if not isinstance(event_data, dict): + raise ValueError(f"Event {i} must be a dictionary") + + event_name = event_data.get("event") + if not event_name: + raise ValueError(f"Event {i} missing required 'event' field") + + properties = event_data.get("properties", {}) + distinct_id = event_data.get("distinct_id", "") + + # Ensure required fields for /import endpoint + if "time" not in properties: + # Use current time in milliseconds if not specified + properties["time"] = int(time.time() * 1000) + + if "$insert_id" not in properties: + # Generate a unique insert_id for deduplication + properties["$insert_id"] = str(uuid.uuid4()) + + # Ensure distinct_id is in properties + properties["distinct_id"] = distinct_id or "" + + # Build event object for /import endpoint + event_obj = { + "event": event_name, + "properties": properties + } + + batch_events.append(event_obj) + + # Use Service Account auth with /import endpoint + result = await MixpanelIngestionClient.make_request( + "POST", + "/import", + data=batch_events, + project_id=project_id + ) + + # Add batch info to result + if isinstance(result, dict): + result["batch_size"] = len(batch_events) + result["events_processed"] = len(batch_events) if result.get("success", True) else 0 + result["project_id"] = project_id + + return result + + except Exception as e: + return { + "success": False, + "error": f"Failed to send events: {str(e)}", + "batch_size": len(events) if isinstance(events, list) else 0, + "events_processed": 0, + "project_id": project_id + } + +async def query_events( + from_date: str, + to_date: str, + event: Optional[str] = None, + where: Optional[str] = None, + limit: Optional[int] = 1000 +) -> Dict[str, Any]: + """Query raw event data from Mixpanel.""" + try: + params = { + "from_date": from_date, + "to_date": to_date + } + + if event: + params["event"] = f'["{event}"]' + + if where: + params["where"] = where + + if limit: + params["limit"] = str(limit) + + # The export endpoint is already included in MIXPANEL_EXPORT_ENDPOINT + result = await MixpanelExportClient.make_request("GET", "", params=params) + + if isinstance(result, dict) and "events" in result: + return { + "success": True, + "events": result["events"], + "count": len(result["events"]), + "message": f"Retrieved {len(result['events'])} events from {from_date} to {to_date}" + } + else: + return { + "success": False, + "events": [], + "error": f"Unexpected response format: {result}" + } + + except Exception as e: + return { + "success": False, + "events": [], + "error": f"Failed to query events: {str(e)}" + } + +async def get_event_count( + from_date: str, + to_date: str, + event: Optional[str] = None +) -> Dict[str, Any]: + """Get total event count for a date range from Mixpanel.""" + try: + params = { + "from_date": from_date, + "to_date": to_date + } + + # If specific event is provided, filter by it + if event: + params["event"] = f'["{event}"]' + + # Query events and count them + # The export endpoint is already included in MIXPANEL_EXPORT_ENDPOINT + result = await MixpanelExportClient.make_request("GET", "", params=params) + + if isinstance(result, dict) and "events" in result: + event_count = len(result["events"]) + + # Calculate additional stats + unique_users = set() + event_types = {} + + for event_data in result["events"]: + # Count unique users + if "properties" in event_data and "distinct_id" in event_data["properties"]: + unique_users.add(event_data["properties"]["distinct_id"]) + + # Count event types + event_name = event_data.get("event", "Unknown") + event_types[event_name] = event_types.get(event_name, 0) + 1 + + return { + "success": True, + "total_events": event_count, + "unique_users": len(unique_users), + "date_range": { + "from": from_date, + "to": to_date + }, + "event_breakdown": event_types, + "filtered_event": event if event else "All events", + "message": f"Found {event_count} events from {from_date} to {to_date}" + } + else: + return { + "success": False, + "total_events": 0, + "error": f"Unexpected response format: {result}" + } + + except Exception as e: + return { + "success": False, + "total_events": 0, + "error": f"Failed to get event count: {str(e)}" + } + +async def get_top_events( + from_date: str, + to_date: str, + limit: Optional[int] = 10 +) -> Dict[str, Any]: + """Get the most common events over a time period from Mixpanel.""" + try: + params = { + "from_date": from_date, + "to_date": to_date + } + + # Query all events for the time period + # The export endpoint is already included in MIXPANEL_EXPORT_ENDPOINT + result = await MixpanelExportClient.make_request("GET", "", params=params) + + if isinstance(result, dict) and "events" in result: + events = result["events"] + + # Count events by type + event_counts = {} + total_events = len(events) + unique_users = set() + + for event_data in events: + # Count events by name + event_name = event_data.get("event", "Unknown") + event_counts[event_name] = event_counts.get(event_name, 0) + 1 + + # Track unique users + if "properties" in event_data and "distinct_id" in event_data["properties"]: + unique_users.add(event_data["properties"]["distinct_id"]) + + # Sort events by count (descending) and get top N + sorted_events = sorted(event_counts.items(), key=lambda x: x[1], reverse=True) + top_events = sorted_events[:limit] if limit else sorted_events + + # Calculate percentages + top_events_with_stats = [] + for event_name, count in top_events: + percentage = (count / total_events * 100) if total_events > 0 else 0 + top_events_with_stats.append({ + "event_name": event_name, + "count": count, + "percentage": round(percentage, 2) + }) + + return { + "success": True, + "top_events": top_events_with_stats, + "total_events_analyzed": total_events, + "unique_users": len(unique_users), + "date_range": { + "from": from_date, + "to": to_date + }, + "limit_requested": limit, + "events_returned": len(top_events_with_stats), + "message": f"Found top {len(top_events_with_stats)} events from {from_date} to {to_date}" + } + else: + return { + "success": False, + "top_events": [], + "error": f"Unexpected response format: {result}" + } + + except Exception as e: + return { + "success": False, + "top_events": [], + "error": f"Failed to get top events: {str(e)}" + } + +async def get_todays_top_events( + limit: Optional[int] = 10 +) -> Dict[str, Any]: + """Get the most common events from today from Mixpanel analytics.""" + try: + from datetime import datetime, date + + # Get today's date in YYYY-MM-DD format + today = date.today().strftime('%Y-%m-%d') + + print(f"Querying today's events for date: {today}") + + # Use the existing get_top_events function with today's date + result = await get_top_events(today, today, limit) + + if isinstance(result, dict) and result.get("success"): + # Enhance the result with today-specific information + enhanced_result = { + **result, + "date": today, + "date_description": f"Today ({today})", + "is_today": True, + "message": f"Found top {len(result.get('top_events', []))} events for today ({today})" + } + + # Add some additional context + if result.get("total_events_analyzed", 0) == 0: + enhanced_result.update({ + "message": f"No events found for today ({today}). This might be because it's early in the day or no events have been tracked yet.", + "suggestion": "Try tracking some test events or check events from yesterday." + }) + + return enhanced_result + else: + # Handle case where get_top_events failed + return { + "success": False, + "top_events": [], + "date": today, + "date_description": f"Today ({today})", + "is_today": True, + "error": result.get("error", "Failed to get today's top events"), + "message": f"Could not retrieve events for today ({today})" + } + + except Exception as e: + from datetime import date + today = date.today().strftime('%Y-%m-%d') + return { + "success": False, + "top_events": [], + "date": today, + "date_description": f"Today ({today})", + "is_today": True, + "error": f"Failed to get today's top events: {str(e)}" + } + +async def get_events( + project_id: str +) -> List[str]: + """Get event names for the given Mixpanel project. + + This tool retrieves all event names that have been tracked in the specified project. + Useful for discovering what events are available for analysis. + + Args: + project_id: The Mixpanel project ID to get event names for + + Returns: + List of event names (strings) for the project + """ + try: + if not project_id: + raise ValueError("project_id is required") + + # Use the Query API endpoint to get event names + params = { + "project_id": project_id, + "type": "general" # Default type for event names + } + + result = await MixpanelQueryClient.make_request( + "GET", + "/query/events/names", + params=params + ) + + # The API returns a list of event names directly + if isinstance(result, list): + return result + elif isinstance(result, dict) and "data" in result: + # Sometimes the response might be wrapped + data = result["data"] + if isinstance(data, list): + return data + else: + logger.warning(f"Unexpected data format: {data}") + return [] + else: + logger.warning(f"Unexpected response format: {result}") + return [] + + except Exception as e: + logger.exception(f"Error getting event names: {e}") + raise + +async def get_event_properties( + project_id: str, + event: str +) -> List[str]: + """Get available properties for a specific event in a Mixpanel project. + + This returns the list of event property keys that can be used for filtering + or aggregation in queries for the provided event name and project. + + Args: + project_id: Mixpanel project ID + event: Event name (e.g., "AI Prompt Sent") + + Returns: + List of property names (strings) + """ + try: + if not project_id: + raise ValueError("project_id is required") + if not event: + raise ValueError("event is required") + + params = { + "project_id": project_id, + "event": event, + } + + # Query API endpoint for event properties, mirroring naming used for events/names + result = await MixpanelQueryClient.make_request( + "GET", + "/query/events/properties", + params=params, + ) + + # The API commonly returns a bare list; handle wrapped responses too + if isinstance(result, list): + return result + if isinstance(result, dict): + # Some responses may be wrapped like { "data": [...] } + data = result.get("data") + if isinstance(data, list): + return data + # Or keyed by the event name + by_event = result.get(event) + if isinstance(by_event, list): + return by_event + logger.warning(f"Unexpected event properties response format: {result}") + return [] + + logger.warning(f"Unexpected response type for event properties: {type(result)}") + return [] + + except Exception as e: + logger.exception(f"Error getting event properties: {e}") + raise + +async def get_event_property_values( + project_id: str, + event: str, + property_name: str +) -> List[str]: + """Get distinct values for a specific event property in a Mixpanel project. + + This returns the list of unique values that have been seen for the given + event's property, useful for building filters and understanding taxonomy. + + Args: + project_id: Mixpanel project ID + event: Event name (e.g., "AI Prompt Sent") + property_name: Property name (e.g., "utm_source") + + Returns: + List of property values (strings) + """ + try: + if not project_id: + raise ValueError("project_id is required") + if not event: + raise ValueError("event is required") + if not property_name: + raise ValueError("property is required") + + params = { + "project_id": project_id, + "event": event, + # Mixpanel expects the property key under the `name` query param + "name": property_name, + } + + # Query API endpoint for property values of an event + result = await MixpanelQueryClient.make_request( + "GET", + "/query/events/properties/values", + params=params, + ) + + # API typically returns a list of strings; handle wrapped responses too + if isinstance(result, list): + return result + if isinstance(result, dict): + data = result.get("data") + if isinstance(data, list): + return data + logger.warning(f"Unexpected event property values response format: {result}") + return [] + + logger.warning(f"Unexpected response type for event property values: {type(result)}") + return [] + + except Exception as e: + logger.exception(f"Error getting event property values: {e}") + raise \ No newline at end of file diff --git a/mcp_servers/mixpanel/tools/frequency.py b/mcp_servers/mixpanel/tools/frequency.py new file mode 100644 index 00000000..da8ddeaa --- /dev/null +++ b/mcp_servers/mixpanel/tools/frequency.py @@ -0,0 +1,102 @@ +import logging +from typing import Any, Dict, Optional +from datetime import date, timedelta + +from .base import MixpanelQueryClient + +logger = logging.getLogger(__name__) + + +async def run_frequency_query( + project_id: str, + event: str, + born_event: Optional[str] = None, + from_date: Optional[str] = None, + to_date: Optional[str] = None, + unit: Optional[str] = "day", + addiction_unit: Optional[str] = "hour", + metric: Optional[str] = "unique", + where: Optional[str] = None, + on: Optional[str] = None, +) -> Dict[str, Any]: + """Run Mixpanel Frequency (Addiction) query via Query API. + + Args: + project_id: Mixpanel project id. + event: Retention event to analyze. + born_event: Cohort/born event. If not provided, defaults to the same as `event`. + from_date: Start date (YYYY-MM-DD). Defaults to 30 days ago. + to_date: End date (YYYY-MM-DD). Defaults to today. + unit: One of {day, week, month}. Granularity of cohorts/time buckets. + addiction_unit: One of {hour, day, week}. Sub-interval used for frequency bins. + metric: One of {general, unique}. general = raw counts, unique = distinct users. + where: Optional boolean expression filter. + on: Optional segmentation property or computed key. + + Returns: + Dict[str, Any] response as returned by Mixpanel Query API. + """ + if not project_id: + raise ValueError("project_id is required") + if not event: + raise ValueError("event is required") + + # Default dates: last 30 days + if not to_date: + to_date = date.today().strftime("%Y-%m-%d") + if not from_date: + from_date = (date.today() - timedelta(days=30)).strftime("%Y-%m-%d") + + # Normalize enums + allowed_units = {"day", "week", "month"} + allowed_addiction_units = {"hour", "day", "week"} + allowed_metrics = {"general", "unique"} + + if unit not in allowed_units: + raise ValueError(f"unit must be one of {sorted(allowed_units)}") + if addiction_unit not in allowed_addiction_units: + raise ValueError(f"addiction_unit must be one of {sorted(allowed_addiction_units)}") + if metric not in allowed_metrics: + raise ValueError(f"metric must be one of {sorted(allowed_metrics)}") + + # Build params for Query API. Endpoint path aligns with other /query/* calls in this project. + params: Dict[str, Any] = { + "project_id": project_id, + "from_date": from_date, + "to_date": to_date, + "unit": unit, + "addiction_unit": addiction_unit, + "metric": metric, + } + + # Mixpanel Query API commonly expects event names as JSON-encoded arrays in query params. + # Provide both single-string fallback and encoded-array form for broader compatibility. + params["event"] = f'[{event!r}]' if event else None + if born_event: + params["born_event"] = f'[{born_event!r}]' + else: + params["born_event"] = f'[{event!r}]' + + if where: + params["where"] = where + if on: + params["on"] = on + + # Remove any Nones from params + params = {k: v for k, v in params.items() if v is not None} + + # Execute request + try: + # Endpoint chosen to match Query API naming used elsewhere (e.g., /query/events/names) + endpoint = "/query/retention/frequency" + result = await MixpanelQueryClient.make_request("GET", endpoint, params=params) + return result + except Exception as e: + logger.exception("Error running frequency query: %s", e) + return { + "success": False, + "error": f"Failed to run frequency query: {str(e)}", + "params": params, + } + + diff --git a/mcp_servers/mixpanel/tools/funnels.py b/mcp_servers/mixpanel/tools/funnels.py new file mode 100644 index 00000000..ad91e960 --- /dev/null +++ b/mcp_servers/mixpanel/tools/funnels.py @@ -0,0 +1,74 @@ +import logging +import json +from typing import Any, Dict, List, Optional +import httpx + +from .base import MixpanelQueryClient + +logger = logging.getLogger(__name__) + +async def run_funnels_query( + project_id: str, + events: List[Dict[str, Any]] | str, + from_date: str, + to_date: str, + count_type: Optional[str] = "unique", +) -> Dict[str, Any]: + """Run a funnel query via Mixpanel Query API. + + Measures conversion through a sequence of steps. + + Args: + project_id: Mixpanel project id. + events: Ordered list of funnel steps (each item typically has at least an 'event' and optional 'step_label'), + or a pre-serialized JSON string for the same. + from_date: Start date YYYY-MM-DD. + to_date: End date YYYY-MM-DD. + count_type: One of {unique, general}. Defaults to unique (distinct users). + + Returns: + Dict[str, Any] response as returned by Mixpanel. + """ + if not project_id: + raise ValueError("project_id is required") + if not events: + raise ValueError("events is required and must define at least one step") + if not from_date or not to_date: + raise ValueError("from_date and to_date are required in YYYY-MM-DD format") + + allowed_count_types = {"unique", "general"} + if count_type and count_type not in allowed_count_types: + raise ValueError(f"count_type must be one of {sorted(allowed_count_types)}") + + # Mixpanel funnels API expects the 'events' parameter as a JSON-encoded array string + if isinstance(events, str): + events_param = events + else: + try: + events_param = json.dumps(events) + except Exception as e: + raise ValueError(f"events must be serializable to JSON array of step objects: {e}") + + params: Dict[str, Any] = { + "project_id": str(project_id), + "events": events_param, + "from_date": from_date, + "to_date": to_date, + "count_type": count_type or "unique", + } + + # Remove any Nones + params = {k: v for k, v in params.items() if v is not None} + + try: + # Historical Mixpanel endpoint for funnels + endpoint = "/2.0/funnels" + result = await MixpanelQueryClient.make_request("GET", endpoint, params=params) + return result + except Exception as e: + logger.exception("Error running funnels query: %s", e) + return { + "success": False, + "error": f"Failed to run funnels query: {str(e)}", + "params": params, + } \ No newline at end of file diff --git a/mcp_servers/mixpanel/tools/projects.py b/mcp_servers/mixpanel/tools/projects.py new file mode 100644 index 00000000..f6b41297 --- /dev/null +++ b/mcp_servers/mixpanel/tools/projects.py @@ -0,0 +1,78 @@ +import logging +from typing import Any, Dict, Optional + +from .base import MixpanelAppAPIClient + +logger = logging.getLogger(__name__) + +async def get_projects() -> Dict[str, Any]: + """Get all projects that are accessible to the current service account user. + + This tool retrieves all projects the service account has access to, + allowing users to select a project for operations that require a project_id. + + Returns: + Dict containing accessible projects in format: {"project_id": {"id": project_id, "name": "project_name"}} + """ + try: + # Use the app API endpoint to get projects + result = await MixpanelAppAPIClient.make_request( + "GET", + "/me" + ) + + if isinstance(result, dict): + # Extract projects information + projects = {} + + # Check if response has results.projects structure + if "results" in result and "projects" in result.get("results", {}): + # Handle /me endpoint response where projects is a dict + projects_dict = result["results"]["projects"] + for project_id, project in projects_dict.items(): + projects[project_id] = { + "id": int(project_id) if project_id.isdigit() else project_id, + "name": project.get("name", "") + } + elif "results" in result and isinstance(result.get("results"), list): + # Handle paginated list response + for item in result.get("results", []): + pid = str(item.get("id")) + projects[pid] = { + "id": item.get("id"), + "name": item.get("name", "") + } + elif "projects" in result and isinstance(result.get("projects"), list): + # Handle direct projects list response + for project in result.get("projects", []): + pid = str(project.get("id")) + projects[pid] = { + "id": project.get("id"), + "name": project.get("name", "") + } + elif "projects" in result and isinstance(result.get("projects"), dict): + # Handle projects as dict response + projects_dict = result["projects"] + for project_id, project in projects_dict.items(): + projects[project_id] = { + "id": int(project_id) if project_id.isdigit() else project_id, + "name": project.get("name", "") + } + else: + # Try to extract project info from the response directly + if result.get("id"): + pid = str(result.get("id")) + projects[pid] = { + "id": result.get("id"), + "name": result.get("name", "") + } + + return projects + else: + return {} + + except Exception as e: + logger.exception(f"Error getting projects: {e}") + return {} + + diff --git a/mcp_servers/mixpanel/tools/retention.py b/mcp_servers/mixpanel/tools/retention.py new file mode 100644 index 00000000..cd832fd3 --- /dev/null +++ b/mcp_servers/mixpanel/tools/retention.py @@ -0,0 +1,99 @@ +import logging +from typing import Any, Dict, Optional +from datetime import date, timedelta + +from .base import MixpanelQueryClient + +logger = logging.getLogger(__name__) + + +async def run_retention_query( + project_id: str, + event: str, + born_event: Optional[str] = None, + from_date: Optional[str] = None, + to_date: Optional[str] = None, + unit: Optional[str] = "day", + retention_type: Optional[str] = "birth", + interval_count: Optional[int] = None, + metric: Optional[str] = "unique", + where: Optional[str] = None, + on: Optional[str] = None, +) -> Dict[str, Any]: + """Run Mixpanel Retention query via Query API. + + Args: + project_id: Mixpanel project id. + event: Retention event to analyze. + born_event: Cohort/born event. If not provided, defaults to the same as `event`. + from_date: Start date (YYYY-MM-DD). Defaults to 30 days ago. + to_date: End date (YYYY-MM-DD). Defaults to today. + unit: One of {day, week, month}. Granularity for cohorts. + retention_type: One of {birth, compounded}. + interval_count: Number of intervals to include. Optional; API may infer from range. + metric: One of {general, unique}. general = raw counts, unique = distinct users. + where: Optional boolean expression filter. + on: Optional segmentation property or computed key. + + Returns: + Dict[str, Any] response as returned by Mixpanel Query API. + """ + if not project_id: + raise ValueError("project_id is required") + if not event: + raise ValueError("event is required") + + # Default dates: last 30 days + if not to_date: + to_date = date.today().strftime("%Y-%m-%d") + if not from_date: + from_date = (date.today() - timedelta(days=30)).strftime("%Y-%m-%d") + + # Normalize enums + allowed_units = {"day", "week", "month"} + allowed_metrics = {"general", "unique"} + allowed_retention_types = {"birth", "compounded"} + + if unit not in allowed_units: + raise ValueError(f"unit must be one of {sorted(allowed_units)}") + if metric not in allowed_metrics: + raise ValueError(f"metric must be one of {sorted(allowed_metrics)}") + if retention_type not in allowed_retention_types: + raise ValueError(f"retention_type must be one of {sorted(allowed_retention_types)}") + + params: Dict[str, Any] = { + "project_id": str(project_id), + "from_date": from_date, + "to_date": to_date, + "unit": unit, + "metric": metric, + "retention_type": retention_type, + } + + # Mixpanel Query API commonly expects event names as JSON-encoded arrays in query params. + params["event"] = f'[{event!r}]' + params["born_event"] = f'[{(born_event or event)!r}]' + + if interval_count is not None: + params["interval_count"] = int(interval_count) + if where: + params["where"] = where + if on: + params["on"] = on + + # Remove any Nones from params + params = {k: v for k, v in params.items() if v is not None} + + try: + endpoint = "/query/retention" + result = await MixpanelQueryClient.make_request("GET", endpoint, params=params) + return result + except Exception as e: + logger.exception("Error running retention query: %s", e) + return { + "success": False, + "error": f"Failed to run retention query: {str(e)}", + "params": params, + } + + diff --git a/mcp_servers/mixpanel/tools/segmentation.py b/mcp_servers/mixpanel/tools/segmentation.py new file mode 100644 index 00000000..14991fa0 --- /dev/null +++ b/mcp_servers/mixpanel/tools/segmentation.py @@ -0,0 +1,88 @@ +import logging +from typing import Any, Dict, Optional + +from .base import MixpanelQueryClient + +logger = logging.getLogger(__name__) + + +async def run_segmentation_query( + project_id: str, + event: str, + from_date: Optional[str] = None, + to_date: Optional[str] = None, + unit: Optional[str] = "day", + type: Optional[str] = "general", + where: Optional[str] = None, + on: Optional[str] = None, + numerical_aggregation: Optional[str] = None, +) -> Dict[str, Any]: + """Run Mixpanel Segmentation query via Query API. + + Args: + project_id: Mixpanel project id. + event: Event name to analyze. + from_date: Start date (YYYY-MM-DD). Defaults to None (defer to server defaults). + to_date: End date (YYYY-MM-DD). Defaults to None (defer to server defaults). + unit: One of {hour, day, month}. Time bucketing for results. + type: One of {general, unique}. general = raw counts, unique = distinct users. + where: Optional boolean expression filter. + on: Optional segmentation property or computed key. + numerical_aggregation: Optional numeric aggregation when grouping by numeric field. + + Returns: + Dict[str, Any] response as returned by Mixpanel Query API. + """ + if not project_id: + raise ValueError("project_id is required") + if not event: + raise ValueError("event is required") + + # Dates are optional; if omitted, allow API/server defaults to apply + + # Normalize enums + allowed_units = {"hour", "day", "month"} + allowed_types = {"general", "unique"} + allowed_num_aggs = {"sum", "average", "buckets"} + + if unit not in allowed_units: + raise ValueError(f"unit must be one of {sorted(allowed_units)}") + if type not in allowed_types: + raise ValueError(f"type must be one of {sorted(allowed_types)}") + if numerical_aggregation is not None and numerical_aggregation not in allowed_num_aggs: + raise ValueError(f"numerical_aggregation must be one of {sorted(allowed_num_aggs)}") + + # Build params for Query API. Use the stable 2.0 segmentation path. + params: Dict[str, Any] = { + "event": event, + "project_id": str(project_id), + "from_date": from_date, + "to_date": to_date, + "unit": unit, + "type": type, + } + + if where: + params["where"] = where + if on: + params["on"] = on + if numerical_aggregation: + params["numerical_aggregation"] = numerical_aggregation + + # Remove any Nones from params + params = {k: v for k, v in params.items() if v is not None} + + try: + # Use Query API segmentation endpoint under https://mixpanel.com/api + endpoint = "/query/segmentation" + result = await MixpanelQueryClient.make_request("GET", endpoint, params=params) + return result + except Exception as e: + logger.exception("Error running segmentation query: %s", e) + return { + "success": False, + "error": f"Failed to run segmentation query: {str(e)}", + "params": params, + } + + diff --git a/mcp_servers/monday/.env.example b/mcp_servers/monday/.env.example new file mode 100644 index 00000000..ec1af265 --- /dev/null +++ b/mcp_servers/monday/.env.example @@ -0,0 +1 @@ +MONDAY_API_KEY= \ No newline at end of file diff --git a/mcp_servers/monday/.eslintrc.json b/mcp_servers/monday/.eslintrc.json new file mode 100644 index 00000000..52c9c608 --- /dev/null +++ b/mcp_servers/monday/.eslintrc.json @@ -0,0 +1,14 @@ +{ + "env": { + "node": true, + "es2022": true + }, + "extends": [ + "eslint:recommended", + "@typescript-eslint/recommended", + "prettier" + ], + "parser": "@typescript-eslint/parser", + "plugins": ["@typescript-eslint"], + "rules": {} +} \ No newline at end of file diff --git a/mcp_servers/monday/Dockerfile b/mcp_servers/monday/Dockerfile new file mode 100644 index 00000000..fcf8dddd --- /dev/null +++ b/mcp_servers/monday/Dockerfile @@ -0,0 +1,32 @@ +FROM node:22.12-alpine AS builder + +# Set the working directory inside the container +WORKDIR /app + +# Copy package.json and package-lock.json to install dependencies +COPY mcp_servers/monday/package.json mcp_servers/monday/package-lock.json ./ + +# Install dependencies (ignoring scripts to prevent running the prepare script) +RUN npm install --ignore-scripts + +# Copy the rest of the application source code +COPY mcp_servers/monday . + +# Build the application using TypeScript +RUN npm run build + +FROM node:22-alpine AS release + +COPY --from=builder /app/dist /app/dist +COPY --from=builder /app/package.json /app/package.json +COPY --from=builder /app/package-lock.json /app/package-lock.json + +ENV NODE_ENV=production + +EXPOSE 5000 + +WORKDIR /app + +RUN npm ci --ignore-scripts --omit-dev + +ENTRYPOINT ["node", "dist/index.js"] \ No newline at end of file diff --git a/mcp_servers/monday/README.md b/mcp_servers/monday/README.md new file mode 100644 index 00000000..61ebfe7d --- /dev/null +++ b/mcp_servers/monday/README.md @@ -0,0 +1,78 @@ +# Monday.com MCP Server + +A Model Context Protocol (MCP) server for Monday.com integration. Manage boards, items, and workflows using Monday.com's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Monday.com with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("MONDAY", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/monday-mcp-server:latest + + +# Run Monday.com MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/monday-mcp-server:latest + + +# Run Monday.com MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_monday_api_token_here"}' \ + ghcr.io/klavis-ai/monday-mcp-server:latest +``` + +**OAuth Setup:** Monday.com requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Board Management**: Create, read, update Monday.com boards +- **Item Operations**: Manage board items and their properties +- **Column Management**: Handle board columns and data types +- **Team Collaboration**: Manage users and team assignments +- **Workflow Automation**: Handle Monday.com automations and integrations + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/monday/package-lock.json b/mcp_servers/monday/package-lock.json new file mode 100644 index 00000000..a1428adb --- /dev/null +++ b/mcp_servers/monday/package-lock.json @@ -0,0 +1,3496 @@ +{ + "name": "@klavis-ai/mcp-server-monday", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@klavis-ai/mcp-server-monday", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@mondaydotcomorg/api": "^10.0.5", + "dotenv": "^17.0.1", + "fastmcp": "^3.8.2", + "zod": "^3.25.67" + }, + "bin": { + "monday-mcp": "dist/index.js" + }, + "devDependencies": { + "@types/express": "^5.0.3", + "@types/node": "^24.0.8", + "@typescript-eslint/eslint-plugin": "^8.35.1", + "@typescript-eslint/parser": "^8.35.1", + "eslint": "^9.30.0", + "eslint-config-prettier": "^10.1.5", + "prettier": "^3.6.2", + "typescript": "^5.8.3" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz", + "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/config-array": { + "version": "0.21.0", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", + "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/object-schema": "^2.1.6", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/config-array/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@eslint/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/config-helpers": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.0.tgz", + "integrity": "sha512-ViuymvFmcJi04qdZeDc2whTHryouGcDlaxPqarTD0ZE10ISpxGUVZGZDx4w01upyIynL3iu6IXH2bS1NhclQMw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/core": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.14.0.tgz", + "integrity": "sha512-qIbV0/JZr7iSDjqAc60IqbLdsj9GDt16xQtWD+B78d/HAlvysGdZZ6rpJHGAc2T0FQx1X6thsSPdnoiGKdNtdg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", + "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@eslint/eslintrc/node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/js": { + "version": "9.30.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.30.0.tgz", + "integrity": "sha512-Wzw3wQwPvc9sHM+NjakWTcPx11mbZyiYHuwWa/QfZ7cIRX7WK54PSk7bdyXDaoaopUcMatv1zaQvOAAO8hCdww==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + } + }, + "node_modules/@eslint/object-schema": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", + "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.3.tgz", + "integrity": "sha512-1+WqvgNMhmlAambTvT3KPtCl/Ibr68VldY2XY40SL1CE0ZXiakFR/cbTspaF5HsnpDMvcYYoJHfl4980NBjGag==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.15.1", + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit/node_modules/@eslint/core": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.1.tgz", + "integrity": "sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@graphql-typed-document-node/core": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@graphql-typed-document-node/core/-/core-3.2.0.tgz", + "integrity": "sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ==", + "license": "MIT", + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@humanfs/core": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.6", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz", + "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.3.0" + }, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", + "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/retry": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.13.3.tgz", + "integrity": "sha512-bGwA78F/U5G2jrnsdRkPY3IwIwZeWUEfb5o764b79lb0rJmMT76TLwKhdNZOWakOQtedYefwIR4emisEMvInKA==", + "license": "MIT", + "dependencies": { + "ajv": "^6.12.6", + "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "eventsource-parser": "^3.0.0", + "express": "^5.0.1", + "express-rate-limit": "^7.5.0", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.23.8", + "zod-to-json-schema": "^3.24.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@mondaydotcomorg/api": { + "version": "10.0.5", + "resolved": "https://registry.npmjs.org/@mondaydotcomorg/api/-/api-10.0.5.tgz", + "integrity": "sha512-kKg9P64S1mHTki42mdynWNuy41u79FPTTGpNYTRiRZ3jDJYIdaq/aHoMhZ//YvIxAvYHZnKiVWEOhRnHFsrQnA==", + "license": "MIT", + "dependencies": { + "graphql": "16.8.2", + "graphql-request": "^6.1.0", + "graphql-tag": "^2.12.6" + }, + "engines": { + "node": ">= 16.20.0" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@sec-ant/readable-stream": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@sec-ant/readable-stream/-/readable-stream-0.4.1.tgz", + "integrity": "sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==", + "license": "MIT" + }, + "node_modules/@sindresorhus/merge-streams": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-4.0.0.tgz", + "integrity": "sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@standard-schema/spec": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", + "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", + "license": "MIT" + }, + "node_modules/@tokenizer/inflate": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/@tokenizer/inflate/-/inflate-0.2.7.tgz", + "integrity": "sha512-MADQgmZT1eKjp06jpI2yozxaU9uVs4GzzgSL+uEq7bVcJ9V1ZXQkeGNql1fsSI0gMy1vhvNTNbUqrx+pZfJVmg==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "fflate": "^0.8.2", + "token-types": "^6.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/@tokenizer/token": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz", + "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==", + "license": "MIT" + }, + "node_modules/@types/body-parser": { + "version": "1.19.6", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz", + "integrity": "sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/express": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.3.tgz", + "integrity": "sha512-wGA0NX93b19/dZC1J18tKWVIYWyyF2ZjT9vin/NRu0qzzvfVzWjs04iq2rQ3H65vCTQYlRqs3YHfY7zjdV+9Kw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^5.0.0", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.7.tgz", + "integrity": "sha512-R+33OsgWw7rOhD1emjU7dzCDHucJrgJXMA5PYCzJxVil0dsyx5iBEPHqpPfiKNJQb7lZ1vxwoLR4Z87bBUpeGQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.5.tgz", + "integrity": "sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.0.8", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.0.8.tgz", + "integrity": "sha512-WytNrFSgWO/esSH9NbpWUfTMGQwCGIKfCmNlmFDNiI5gGhgMmEA+V1AEvKLeBNvvtBnailJtkrEa2OIISwrVAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.8.0" + } + }, + "node_modules/@types/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/send": { + "version": "0.17.5", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.5.tgz", + "integrity": "sha512-z6F2D3cOStZvuk2SaP6YrwkNO65iTZcwA2ZkSABegdkAh/lf+Aa/YQndZVfmEXT5vgAp6zv06VQ3ejSVjAny4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.8.tgz", + "integrity": "sha512-roei0UY3LhpOJvjbIP6ZZFngyLKl5dskOtDhxY5THRSpO+ZI+nzJ+m5yUMzGrp89YRa7lvknKkMYjqQFGwA7Sg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.35.1.tgz", + "integrity": "sha512-9XNTlo7P7RJxbVeICaIIIEipqxLKguyh+3UbXuT2XQuFp6d8VOeDEGuz5IiX0dgZo8CiI6aOFLg4e8cF71SFVg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.35.1", + "@typescript-eslint/type-utils": "8.35.1", + "@typescript-eslint/utils": "8.35.1", + "@typescript-eslint/visitor-keys": "8.35.1", + "graphemer": "^1.4.0", + "ignore": "^7.0.0", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.35.1", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.35.1.tgz", + "integrity": "sha512-3MyiDfrfLeK06bi/g9DqJxP5pV74LNv4rFTyvGDmT3x2p1yp1lOd+qYZfiRPIOf/oON+WRZR5wxxuF85qOar+w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/scope-manager": "8.35.1", + "@typescript-eslint/types": "8.35.1", + "@typescript-eslint/typescript-estree": "8.35.1", + "@typescript-eslint/visitor-keys": "8.35.1", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/project-service": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.35.1.tgz", + "integrity": "sha512-VYxn/5LOpVxADAuP3NrnxxHYfzVtQzLKeldIhDhzC8UHaiQvYlXvKuVho1qLduFbJjjy5U5bkGwa3rUGUb1Q6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/tsconfig-utils": "^8.35.1", + "@typescript-eslint/types": "^8.35.1", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.35.1.tgz", + "integrity": "sha512-s/Bpd4i7ht2934nG+UoSPlYXd08KYz3bmjLEb7Ye1UVob0d1ENiT3lY8bsCmik4RqfSbPw9xJJHbugpPpP5JUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.35.1", + "@typescript-eslint/visitor-keys": "8.35.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.35.1.tgz", + "integrity": "sha512-K5/U9VmT9dTHoNowWZpz+/TObS3xqC5h0xAIjXPw+MNcKV9qg6eSatEnmeAwkjHijhACH0/N7bkhKvbt1+DXWQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.35.1.tgz", + "integrity": "sha512-HOrUBlfVRz5W2LIKpXzZoy6VTZzMu2n8q9C2V/cFngIC5U1nStJgv0tMV4sZPzdf4wQm9/ToWUFPMN9Vq9VJQQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/typescript-estree": "8.35.1", + "@typescript-eslint/utils": "8.35.1", + "debug": "^4.3.4", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.35.1.tgz", + "integrity": "sha512-q/O04vVnKHfrrhNAscndAn1tuQhIkwqnaW+eu5waD5IPts2eX1dgJxgqcPx5BX109/qAz7IG6VrEPTOYKCNfRQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.35.1.tgz", + "integrity": "sha512-Vvpuvj4tBxIka7cPs6Y1uvM7gJgdF5Uu9F+mBJBPY4MhvjrjWGK4H0lVgLJd/8PWZ23FTqsaJaLEkBCFUk8Y9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/project-service": "8.35.1", + "@typescript-eslint/tsconfig-utils": "8.35.1", + "@typescript-eslint/types": "8.35.1", + "@typescript-eslint/visitor-keys": "8.35.1", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.35.1.tgz", + "integrity": "sha512-lhnwatFmOFcazAsUm3ZnZFpXSxiwoa1Lj50HphnDe1Et01NF4+hrdXONSUHIcbVu2eFb1bAf+5yjXkGVkXBKAQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.35.1", + "@typescript-eslint/types": "8.35.1", + "@typescript-eslint/typescript-estree": "8.35.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.35.1.tgz", + "integrity": "sha512-VRwixir4zBWCSTP/ljEo091lbpypz57PoeAQ9imjG+vbeof9LplljsL1mos4ccG6H9IjfrVGM359RozUnuFhpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.35.1", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/cliui": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-9.0.1.tgz", + "integrity": "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==", + "license": "ISC", + "dependencies": { + "string-width": "^7.2.0", + "strip-ansi": "^7.1.0", + "wrap-ansi": "^9.0.0" + }, + "engines": { + "node": ">=20" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/cross-fetch": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.2.0.tgz", + "integrity": "sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q==", + "license": "MIT", + "dependencies": { + "node-fetch": "^2.7.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/dotenv": { + "version": "17.0.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.0.1.tgz", + "integrity": "sha512-GLjkduuAL7IMJg/ZnOPm9AnWKJ82mSE2tzXLaJ/6hD6DhwGfZaXG77oB8qbReyiczNxnbxQKyh0OE5mXq0bAHA==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/emoji-regex": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", + "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "9.30.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.30.0.tgz", + "integrity": "sha512-iN/SiPxmQu6EVkf+m1qpBxzUhE12YqFLOSySuOyVLJLEF9nzTf+h/1AJYc1JWzCnktggeNrjvQGLngDzXirU6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.0", + "@eslint/config-helpers": "^0.3.0", + "@eslint/core": "^0.14.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.30.0", + "@eslint/plugin-kit": "^0.3.1", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "@types/json-schema": "^7.0.15", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-config-prettier": { + "version": "10.1.5", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.5.tgz", + "integrity": "sha512-zc1UmCpNltmVY34vuLRV61r1K27sWuX39E+uyUnY8xS2Bex88VV9cugG+UZbRSRGtGyFboj+D8JODyme1plMpw==", + "dev": true, + "license": "MIT", + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "funding": { + "url": "https://opencollective.com/eslint-config-prettier" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-scope": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/espree/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventsource": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", + "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", + "license": "MIT", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.3.tgz", + "integrity": "sha512-nVpZkTMM9rF6AQ9gPJpFsNAMt48wIzB5TQgiTLdHiuO8XEDhUgZEhqKlZWXbIzo9VmJ/HvysHqEaVeD5v9TPvA==", + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/execa": { + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-9.6.0.tgz", + "integrity": "sha512-jpWzZ1ZhwUmeWRhS7Qv3mhpOhLfwI+uAX4e5fOcXqwMR7EcJ0pj2kV1CVzHVMX/LphnKWD3LObjZCoJ71lKpHw==", + "license": "MIT", + "dependencies": { + "@sindresorhus/merge-streams": "^4.0.0", + "cross-spawn": "^7.0.6", + "figures": "^6.1.0", + "get-stream": "^9.0.0", + "human-signals": "^8.0.1", + "is-plain-obj": "^4.1.0", + "is-stream": "^4.0.1", + "npm-run-path": "^6.0.0", + "pretty-ms": "^9.2.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^4.0.0", + "yoctocolors": "^2.1.1" + }, + "engines": { + "node": "^18.19.0 || >=20.5.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/express": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "license": "MIT", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.0", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.1.tgz", + "integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==", + "license": "MIT", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": ">= 4.11" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fastmcp": { + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/fastmcp/-/fastmcp-3.8.2.tgz", + "integrity": "sha512-GN8SBv2+vm+PYbckvT8ZVZvjxkkIaQhgoGp3WrJ28AlJVt1wmd8S332L7o7SfuFzyYA4kwT2ofEtYJtRlm/mcw==", + "license": "MIT", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.13.2", + "@standard-schema/spec": "^1.0.0", + "execa": "^9.6.0", + "file-type": "^21.0.0", + "fuse.js": "^7.1.0", + "mcp-proxy": "^5.1.1", + "strict-event-emitter-types": "^2.0.0", + "undici": "^7.11.0", + "uri-templates": "^0.2.0", + "xsschema": "0.3.0-beta.5", + "yargs": "^18.0.0", + "zod": "^3.25.67", + "zod-to-json-schema": "^3.24.6" + }, + "bin": { + "fastmcp": "dist/bin/fastmcp.js" + } + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fflate": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", + "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==", + "license": "MIT" + }, + "node_modules/figures": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-6.1.0.tgz", + "integrity": "sha512-d+l3qxjSesT4V7v2fh+QnmFnUWv9lSpjarhShNTgBOfA0ttejbQUAlHLitbjkoRiDulW0OPoQPYIGhIC8ohejg==", + "license": "MIT", + "dependencies": { + "is-unicode-supported": "^2.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/file-type": { + "version": "21.0.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-21.0.0.tgz", + "integrity": "sha512-ek5xNX2YBYlXhiUXui3D/BXa3LdqPmoLJ7rqEx2bKJ7EAUEfmXgW0Das7Dc6Nr9MvqaOnIqiPV0mZk/r/UpNAg==", + "license": "MIT", + "dependencies": { + "@tokenizer/inflate": "^0.2.7", + "strtok3": "^10.2.2", + "token-types": "^6.0.0", + "uint8array-extras": "^1.4.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sindresorhus/file-type?sponsor=1" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/fuse.js": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/fuse.js/-/fuse.js-7.1.0.tgz", + "integrity": "sha512-trLf4SzuuUxfusZADLINj+dE8clK1frKdmqiJNb1Es75fmI5oY6X2mxLVUciLLjxqw/xr72Dhy+lER6dGd02FQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=10" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-east-asian-width": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.3.0.tgz", + "integrity": "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-stream": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-9.0.1.tgz", + "integrity": "sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA==", + "license": "MIT", + "dependencies": { + "@sec-ant/readable-stream": "^0.4.1", + "is-stream": "^4.0.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true, + "license": "MIT" + }, + "node_modules/graphql": { + "version": "16.8.2", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.2.tgz", + "integrity": "sha512-cvVIBILwuoSyD54U4cF/UXDh5yAobhNV/tPygI4lZhgOIJQE/WLWC4waBRb4I6bDVYb3OVx3lfHbaQOEoUD5sg==", + "license": "MIT", + "engines": { + "node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0" + } + }, + "node_modules/graphql-request": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/graphql-request/-/graphql-request-6.1.0.tgz", + "integrity": "sha512-p+XPfS4q7aIpKVcgmnZKhMNqhltk20hfXtkaIkTfjjmiKMJ5xrt5c743cL03y/K7y1rg3WrIC49xGiEQ4mxdNw==", + "license": "MIT", + "dependencies": { + "@graphql-typed-document-node/core": "^3.2.0", + "cross-fetch": "^3.1.5" + }, + "peerDependencies": { + "graphql": "14 - 16" + } + }, + "node_modules/graphql-tag": { + "version": "2.12.6", + "resolved": "https://registry.npmjs.org/graphql-tag/-/graphql-tag-2.12.6.tgz", + "integrity": "sha512-FdSNcu2QQcWnM2VNvSCCDCVS5PpPqpzgFT8+GXzqJuoDd0CBncxCY278u4mhRO7tMgo2JjgJA5aZ+nWSQ/Z+xg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.1.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "graphql": "^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-errors/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/human-signals": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-8.0.1.tgz", + "integrity": "sha512-eKCa6bwnJhvxj14kZk5NCPc6Hb6BdsU9DZcOnmQKSnO1VKrfV0zCvtttPZUsBvjmNDn8rpcJfpwSYnHBjc95MQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" + }, + "node_modules/is-stream": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-4.0.1.tgz", + "integrity": "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-unicode-supported": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz", + "integrity": "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mcp-proxy": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/mcp-proxy/-/mcp-proxy-5.3.0.tgz", + "integrity": "sha512-dknV3cAOdxJXampGqeG56G1UWFWf+2Msmh+r7WbTBqrZXaOFiZh6TSmd3Eig2+QQ5eCkLjrdpVKyjnisUSkizA==", + "license": "MIT", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.13.2", + "eventsource": "^4.0.0", + "yargs": "^18.0.0" + }, + "bin": { + "mcp-proxy": "dist/bin/mcp-proxy.js" + } + }, + "node_modules/mcp-proxy/node_modules/eventsource": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-4.0.0.tgz", + "integrity": "sha512-fvIkb9qZzdMxgZrEQDyll+9oJsyaVvY92I2Re+qK0qEJ+w5s0X3dtz+M0VAPOjP1gtU3iqWyjQ0G3nvd5CLZ2g==", + "license": "MIT", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/npm-run-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-6.0.0.tgz", + "integrity": "sha512-9qny7Z9DsQU8Ou39ERsPU4OZQlSTP47ShQzuKZ6PRXpYLtIFgl/DEBYEXKlvcEa+9tHVcK8CF81Y2V72qaZhWA==", + "license": "MIT", + "dependencies": { + "path-key": "^4.0.0", + "unicorn-magic": "^0.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-ms": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-4.0.0.tgz", + "integrity": "sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-to-regexp": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "license": "MIT", + "engines": { + "node": ">=16" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pkce-challenge": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", + "license": "MIT", + "engines": { + "node": ">=16.20.0" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz", + "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/pretty-ms": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-9.2.0.tgz", + "integrity": "sha512-4yf0QO/sllf/1zbZWYnvWw3NxCQwLXKzIj0G849LSufP15BXKM0rbD2Z3wVnkMfjdn/CB0Dpp444gYAACdsplg==", + "license": "MIT", + "dependencies": { + "parse-ms": "^4.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", + "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.6.3", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + "path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/serve-static": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/strict-event-emitter-types": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strict-event-emitter-types/-/strict-event-emitter-types-2.0.0.tgz", + "integrity": "sha512-Nk/brWYpD85WlOgzw5h173aci0Teyv8YdIAEtV+N88nDB0dLlazZyJMIsN6eo1/AR61l+p6CJTG1JIyFaoNEEA==", + "license": "ISC" + }, + "node_modules/string-width": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", + "license": "MIT", + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-final-newline": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-4.0.0.tgz", + "integrity": "sha512-aulFJcD6YK8V1G7iRB5tigAP4TsHBZZrOV8pjV++zdUwmeV8uzbY7yn6h9MswN62adStNZFuCIx4haBnRuMDaw==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strtok3": { + "version": "10.3.1", + "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-10.3.1.tgz", + "integrity": "sha512-3JWEZM6mfix/GCJBBUrkA8p2Id2pBkyTkVCJKto55w080QBKZ+8R171fGrbiSp+yMO/u6F8/yUh7K4V9K+YCnw==", + "license": "MIT", + "dependencies": { + "@tokenizer/token": "^0.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/token-types": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/token-types/-/token-types-6.0.3.tgz", + "integrity": "sha512-IKJ6EzuPPWtKtEIEPpIdXv9j5j2LGJEYk0CKY2efgKoYKLBiZdh6iQkLVBow/CB3phyWAWCyk+bZeaimJn6uRQ==", + "license": "MIT", + "dependencies": { + "@tokenizer/token": "^0.3.0", + "ieee754": "^1.2.1" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" + }, + "node_modules/ts-api-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", + "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/uint8array-extras": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/uint8array-extras/-/uint8array-extras-1.4.0.tgz", + "integrity": "sha512-ZPtzy0hu4cZjv3z5NW9gfKnNLjoz4y6uv4HlelAjDK7sY/xOkKZv9xK/WQpcsBB3jEybChz9DPC2U/+cusjJVQ==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/undici": { + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-7.11.0.tgz", + "integrity": "sha512-heTSIac3iLhsmZhUCjyS3JQEkZELateufzZuBaVM5RHXdSBMb1LPMQf5x+FH7qjsZYDP0ttAc3nnVpUB+wYbOg==", + "license": "MIT", + "engines": { + "node": ">=20.18.1" + } + }, + "node_modules/undici-types": { + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.8.0.tgz", + "integrity": "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==", + "dev": true, + "license": "MIT" + }, + "node_modules/unicorn-magic": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", + "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/uri-templates": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/uri-templates/-/uri-templates-0.2.0.tgz", + "integrity": "sha512-EWkjYEN0L6KOfEoOH6Wj4ghQqU7eBZMJqRHQnxQAq+dSEzRPClkWjf8557HkWQXF6BrAUoLSAyy9i3RVTliaNg==", + "license": "http://geraintluff.github.io/tv4/LICENSE.txt" + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", + "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, + "node_modules/xsschema": { + "version": "0.3.0-beta.5", + "resolved": "https://registry.npmjs.org/xsschema/-/xsschema-0.3.0-beta.5.tgz", + "integrity": "sha512-NYMPzYegZpL+zpTMswdg41+mmcyCkym38nvsZvPAHPDAOiOT8zAXrE28XvwyQDMsLuGE51IsJ+07RHYqTh4PUQ==", + "license": "MIT", + "peerDependencies": { + "@valibot/to-json-schema": "^1.0.0", + "arktype": "^2.1.16", + "effect": "^3.14.5", + "sury": "^10.0.0-rc", + "zod": "^3.25.0", + "zod-to-json-schema": "^3.24.5" + }, + "peerDependenciesMeta": { + "@valibot/to-json-schema": { + "optional": true + }, + "arktype": { + "optional": true + }, + "effect": { + "optional": true + }, + "sury": { + "optional": true + }, + "zod": { + "optional": true + }, + "zod-to-json-schema": { + "optional": true + } + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs": { + "version": "18.0.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-18.0.0.tgz", + "integrity": "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==", + "license": "MIT", + "dependencies": { + "cliui": "^9.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "string-width": "^7.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^22.0.0" + }, + "engines": { + "node": "^20.19.0 || ^22.12.0 || >=23" + } + }, + "node_modules/yargs-parser": { + "version": "22.0.0", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-22.0.0.tgz", + "integrity": "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==", + "license": "ISC", + "engines": { + "node": "^20.19.0 || ^22.12.0 || >=23" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yoctocolors": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/yoctocolors/-/yoctocolors-2.1.1.tgz", + "integrity": "sha512-GQHQqAopRhwU8Kt1DDM8NjibDXHC8eoh1erhGAJPEyveY9qqVeXvVikNKrDz69sHowPMorbPUrH/mx8c50eiBQ==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zod": { + "version": "3.25.67", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.67.tgz", + "integrity": "sha512-idA2YXwpCdqUSKRCACDE6ItZD9TZzy3OZMtpfLoh6oPR47lipysRrJfjzMqFxQ3uJuUPyUeWe1r9vLH33xO/Qw==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.24.6", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz", + "integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.24.1" + } + } + } +} diff --git a/mcp_servers/monday/package.json b/mcp_servers/monday/package.json new file mode 100644 index 00000000..c3cccfa9 --- /dev/null +++ b/mcp_servers/monday/package.json @@ -0,0 +1,58 @@ +{ + "name": "@klavis-ai/mcp-server-monday", + "version": "1.0.0", + "description": "MCP server for Monday.com API integration", + "keywords": [ + "monday", + "mcp", + "klavis.ai" + ], + "homepage": "https://github.com/Klavis-AI/klavis#readme", + "bugs": { + "url": "https://github.com/Klavis-AI/klavis/issues" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/Klavis-AI/klavis.git" + }, + "license": "MIT", + "author": "Klavis AI", + "main": "src/index.ts", + "scripts": { + "build": "tsc", + "start": "node dist/index.js", + "lint": "eslint src/**/*.ts", + "lint:fix": "eslint src/**/*.ts --fix", + "format": "prettier --write .", + "prepare": "npm run build", + "publish": "npm run build && npm publish" + }, + "engines": { + "node": ">=18.0.0" + }, + "bin": { + "monday-mcp": "dist/index.js" + }, + "files": [ + "dist" + ], + "publishConfig": { + "access": "public" + }, + "dependencies": { + "@mondaydotcomorg/api": "^10.0.5", + "dotenv": "^17.0.1", + "fastmcp": "^3.8.2", + "zod": "^3.25.67" + }, + "devDependencies": { + "@types/express": "^5.0.3", + "@types/node": "^24.0.8", + "@typescript-eslint/eslint-plugin": "^8.35.1", + "@typescript-eslint/parser": "^8.35.1", + "eslint": "^9.30.0", + "eslint-config-prettier": "^10.1.5", + "prettier": "^3.6.2", + "typescript": "^5.8.3" + } +} diff --git a/mcp_servers/monday/src/index.ts b/mcp_servers/monday/src/index.ts new file mode 100644 index 00000000..cb81a0aa --- /dev/null +++ b/mcp_servers/monday/src/index.ts @@ -0,0 +1,171 @@ +import dotenv from 'dotenv'; +import { FastMCP } from 'fastmcp'; +import { IncomingMessage } from 'http'; +import { getUsersByName, getUsersToolSchema } from './tools'; +import { + createBoard, + createBoardToolSchema, + getBoards, + getBoardSchema, + getBoardSchemaToolSchema, +} from './tools/boards'; +import { + createColumn, + createColumnToolSchema, + deleteColumn, + deleteColumnToolSchema, +} from './tools/columns'; +import { + changeItemColumnValues, + changeItemColumnValuesToolSchema, + createItem, + createItemToolSchema, + createUpdate, + createUpdateToolSchema, + deleteItem, + deleteItemToolSchema, + getBoardItemsByName, + getBoardItemsByNameToolSchema, + moveItemToGroup, + moveItemToGroupToolSchema, +} from './tools/items'; + +dotenv.config(); + +function extractAccessToken(req: IncomingMessage): string { + let authData = process.env.AUTH_DATA; + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: Monday access token is missing. Provide it via AUTH_DATA env var or x-auth-data header with access_token field.'); + return ''; + } + + const authDataJson = JSON.parse(authData); + return authDataJson.access_token ?? ''; +} + +const server = new FastMCP({ + name: 'monday', + version: '1.0.0', + authenticate: async (request) => { + const token = extractAccessToken(request); + if (!token) { + throw new Error( + 'Error: Monday API token is missing. Provide it via AUTH_DATA env var or x-auth-data header with access_token field.', + ); + } + return { token }; + }, +}); + +server.addTool({ + name: 'monday_get_users_by_name', + description: 'Retrieve user information by name or partial name', + parameters: getUsersToolSchema, + annotations: { category: 'MONDAY_USER', readOnlyHint: true } as any, + execute: async (args, { session }) => await getUsersByName(args, session?.token as string), +}); + +server.addTool({ + name: 'monday_get_board_schema', + description: 'Get board schema (columns and groups) by board id', + parameters: getBoardSchemaToolSchema, + annotations: { category: 'MONDAY_BOARD', readOnlyHint: true } as any, + execute: async (args, { session }) => await getBoardSchema(args, session?.token as string), +}); + +server.addTool({ + name: 'monday_create_board', + description: 'Create a new monday.com board with specified columns and groups', + parameters: createBoardToolSchema, + annotations: { category: 'MONDAY_BOARD' } as any, + execute: async (args, { session }) => await createBoard(args, session?.token as string), +}); + +server.addTool({ + name: 'monday_get_boards', + description: 'Get all the monday.com boards', + annotations: { category: 'MONDAY_BOARD', readOnlyHint: true } as any, + execute: async (args, { session }) => await getBoards(session?.token as string), +}); + +server.addTool({ + name: 'monday_create_column', + description: 'Create a new column in a monday.com board', + parameters: createColumnToolSchema, + annotations: { category: 'MONDAY_COLUMN' } as any, + execute: async (args, { session }) => await createColumn(args, session?.token as string), +}); + +server.addTool({ + name: 'monday_delete_column', + description: 'Delete a column from a monday.com board', + parameters: deleteColumnToolSchema, + annotations: { category: 'MONDAY_COLUMN' } as any, + execute: async (args, { session }) => await deleteColumn(args, session?.token as string), +}); + +server.addTool({ + name: 'monday_create_item', + description: 'Create a new item in a monday.com board', + parameters: createItemToolSchema, + annotations: { category: 'MONDAY_ITEM' } as any, + execute: async (args, { session }) => await createItem(args, session?.token as string), +}); + +server.addTool({ + name: 'monday_get_board_items_by_name', + description: 'Get items by name from a monday.com board', + parameters: getBoardItemsByNameToolSchema, + annotations: { category: 'MONDAY_ITEM', readOnlyHint: true } as any, + execute: async (args, { session }) => await getBoardItemsByName(args, session?.token as string), +}); + +server.addTool({ + name: 'monday_create_update', + description: 'Create a new update for an item in a monday.com board', + parameters: createUpdateToolSchema, + annotations: { category: 'MONDAY_UPDATE' } as any, + execute: async (args, { session }) => await createUpdate(args, session?.token as string), +}); + +server.addTool({ + name: 'monday_delete_item', + description: 'Delete an item from a monday.com board', + parameters: deleteItemToolSchema, + annotations: { category: 'MONDAY_ITEM' } as any, + execute: async (args, { session }) => await deleteItem(args, session?.token as string), +}); + +server.addTool({ + name: 'monday_change_item_column_values', + description: 'Change the column values of an item in a monday.com board', + parameters: changeItemColumnValuesToolSchema, + annotations: { category: 'MONDAY_ITEM' } as any, + execute: async (args, { session }) => + await changeItemColumnValues(args, session?.token as string), +}); + +server.addTool({ + name: 'monday_move_item_to_group', + description: 'Move an item to a group in a monday.com board', + parameters: moveItemToGroupToolSchema, + annotations: { category: 'MONDAY_ITEM' } as any, + execute: async (args, { session }) => await moveItemToGroup(args, session?.token as string), +}); + +server.start({ + httpStream: { + port: 5000, + endpoint: '/mcp', + }, + transportType: 'httpStream', +}); diff --git a/mcp_servers/monday/src/tools/base.ts b/mcp_servers/monday/src/tools/base.ts new file mode 100644 index 00000000..b2a4c705 --- /dev/null +++ b/mcp_servers/monday/src/tools/base.ts @@ -0,0 +1,5 @@ +import { ApiClient } from '@mondaydotcomorg/api'; + +export function createClient(token: string): ApiClient { + return new ApiClient({ token }); +} diff --git a/mcp_servers/monday/src/tools/boards.ts b/mcp_servers/monday/src/tools/boards.ts new file mode 100644 index 00000000..d148ece7 --- /dev/null +++ b/mcp_servers/monday/src/tools/boards.ts @@ -0,0 +1,51 @@ +import { BoardKind } from '@mondaydotcomorg/api'; +import { z } from 'zod'; +import { createClient } from './base'; +import { createBoardQuery, getBoardSchemaQuery, getBoardsQuery } from './queries.graphql'; + +export const getBoardSchemaToolSchema = z.object({ + boardId: z.string().describe('The ID of the board to get the schema for'), +}); + +export const createBoardToolSchema = z.object({ + boardName: z.string().describe('The name of the board to create'), + boardKind: z + .nativeEnum(BoardKind) + .default(BoardKind.Public) + .describe('The kind of board to create'), + boardDescription: z.string().optional().describe('The description of the board to create'), + workspaceId: z.string().optional().describe('The ID of the workspace to create the board in'), +}); + +export const getBoards = async (token: string) => { + const boards = await createClient(token).request(getBoardsQuery); + return { + type: 'text' as const, + text: JSON.stringify(boards), + }; +}; +export const getBoardSchema = async ( + args: z.infer, + token: string, +) => { + const { boardId } = args; + const board = await createClient(token).request(getBoardSchemaQuery, { boardId }); + return { + type: 'text' as const, + text: JSON.stringify(board), + }; +}; + +export const createBoard = async (args: z.infer, token: string) => { + const { boardName, boardKind, boardDescription, workspaceId } = args; + const board = await createClient(token).request(createBoardQuery, { + boardName, + boardKind, + boardDescription, + workspaceId, + }); + return { + type: 'text' as const, + text: JSON.stringify(board), + }; +}; diff --git a/mcp_servers/monday/src/tools/columns.ts b/mcp_servers/monday/src/tools/columns.ts new file mode 100644 index 00000000..734e091b --- /dev/null +++ b/mcp_servers/monday/src/tools/columns.ts @@ -0,0 +1,63 @@ +import { ColumnType } from '@mondaydotcomorg/api'; +import { z } from 'zod'; +import { createClient } from './base'; +import { createColumnQuery, deleteColumnQuery } from './queries.graphql'; + +export const createColumnToolSchema = z.object({ + boardId: z.string().describe('The id of the board to create the column in'), + columnType: z.nativeEnum(ColumnType).describe('The type of the column to be created'), + columnTitle: z.string().describe('The title of the column to be created'), + columnDescription: z.string().optional().describe('The description of the column to be created'), + columnSettings: z + .array(z.string()) + .optional() + .describe( + "The default values for the new column (relevant only for column type 'status' or 'dropdown') when possible make the values relevant to the user's request", + ), +}); + +export const deleteColumnToolSchema = z.object({ + boardId: z.string().describe('The id of the board to delete the column from'), + columnId: z.string().describe('The id of the column to delete'), +}); + +export const createColumn = async (args: z.infer, token: string) => { + const { boardId, columnType, columnTitle, columnDescription, columnSettings } = args; + let columnSettingsString: string | undefined; + if (columnSettings && columnType === ColumnType.Status) { + columnSettingsString = JSON.stringify({ + labels: Object.fromEntries( + columnSettings.map((label: string, i: number) => [String(i + 1), label]), + ), + }); + } else if (columnSettings && columnType === ColumnType.Dropdown) { + columnSettingsString = JSON.stringify({ + settings: { + labels: columnSettings.map((label: string, i: number) => ({ id: i + 1, name: label })), + }, + }); + } + const column = await createClient(token).request(createColumnQuery, { + boardId: boardId.toString(), + columnType, + columnTitle, + columnDescription, + columnSettings: columnSettingsString, + }); + return { + type: 'text' as const, + text: JSON.stringify(column, null, 2), + }; +}; + +export const deleteColumn = async (args: z.infer, token: string) => { + const { boardId, columnId } = args; + const column = await createClient(token).request(deleteColumnQuery, { + boardId: boardId.toString(), + columnId: columnId.toString(), + }); + return { + type: 'text' as const, + text: JSON.stringify(column, null, 2), + }; +}; diff --git a/mcp_servers/monday/src/tools/index.ts b/mcp_servers/monday/src/tools/index.ts new file mode 100644 index 00000000..b18bf4ca --- /dev/null +++ b/mcp_servers/monday/src/tools/index.ts @@ -0,0 +1 @@ +export * from './users'; diff --git a/mcp_servers/monday/src/tools/items.ts b/mcp_servers/monday/src/tools/items.ts new file mode 100644 index 00000000..9535e0b5 --- /dev/null +++ b/mcp_servers/monday/src/tools/items.ts @@ -0,0 +1,138 @@ +import { z } from 'zod'; +import { createClient } from './base'; +import { + changeItemColumnValuesQuery, + createItemQuery, + createUpdateQuery, + deleteItemQuery, + getBoardItemsByNameQuery, + moveItemToGroupQuery, +} from './queries.graphql'; + +export const createItemToolSchema = z.object({ + boardId: z.string().describe('The id of the board to create the item in'), + name: z + .string() + .describe("The name of the new item to be created, must be relevant to the user's request"), + groupId: z + .string() + .optional() + .describe( + 'The id of the group id to which the new item will be added, if its not clearly specified, leave empty', + ), + columnValues: z + .string() + .describe( + 'A string containing the new column values for the item following this structure: {\\"column_id\\": \\"value\\",... you can change multiple columns at once, note that for status column you must use nested value with "label" as a key and for date column use "date" as key} - example: "{\\"text_column_id\\":\\"New text\\", \\"status_column_id\\":{\\"label\\":\\"Done\\"}, \\"date_column_id\\":{\\"date\\":\\"2023-05-25\\"},\\"dropdown_id\\":\\"value\\", \\"phone_id\\":\\"123-456-7890\\", \\"email_id\\":\\"test@example.com\\", {\\"boolean_column_id\\":{\\"checked\\":true}}', + ), +}); + +export const getBoardItemsByNameToolSchema = z.object({ + boardId: z.string().describe('The id of the board to get the items from'), + term: z.string().min(1).describe('The term to search for in the items'), +}); + +export const createUpdateToolSchema = z.object({ + itemId: z.string().describe('The id of the item to which the update will be added'), + body: z.string().describe("The update to be created, must be relevant to the user's request"), +}); + +export const deleteItemToolSchema = z.object({ + itemId: z.string().describe('The id of the item to delete'), +}); + +export const changeItemColumnValuesToolSchema = z.object({ + boardId: z.string().describe('The id of the board to change the column values of'), + itemId: z.string().describe('The id of the item to change the column values of'), + columnValues: z + .string() + .describe( + `A string containing the new column values for the item following this structure: {\\"column_id\\": \\"value\\",... you can change multiple columns at once, note that for status column you must use nested value with 'label' as a key and for date column use 'date' as key} - example: "{\\"text_column_id\\":\\"New text\\", \\"status_column_id\\":{\\"label\\":\\"Done\\"}, \\"date_column_id\\":{\\"date\\":\\"2023-05-25\\"}, \\"phone_id\\":\\"123-456-7890\\", \\"email_id\\":\\"test@example.com\\", {\\"boolean_column_id\\":{\\"checked\\":true}}"`, + ), +}); + +export const moveItemToGroupToolSchema = z.object({ + itemId: z.string().describe('The id of the item to move'), + groupId: z.string().describe('The id of the group to which the item will be moved'), +}); + +export const createItem = async (args: z.infer, token: string) => { + const { boardId, name, groupId, columnValues } = args; + const item = await createClient(token).request(createItemQuery, { + boardId: boardId.toString(), + itemName: name, + groupId: groupId?.toString(), + columnValues, + }); + return { + type: 'text' as const, + text: JSON.stringify(item, null, 2), + }; +}; + +export const getBoardItemsByName = async ( + args: z.infer, + token: string, +) => { + const { boardId, term } = args; + const items = await createClient(token).request(getBoardItemsByNameQuery, { + boardId: boardId.toString(), + term, + }); + return { + type: 'text' as const, + text: JSON.stringify(items, null, 2), + }; +}; + +export const createUpdate = async (args: z.infer, token: string) => { + const { itemId, body } = args; + const update = await createClient(token).request(createUpdateQuery, { + itemId, + body, + }); + return { + type: 'text' as const, + text: JSON.stringify(update, null, 2), + }; +}; + +export const deleteItem = async (args: z.infer, token: string) => { + const { itemId } = args; + const item = await createClient(token).request(deleteItemQuery, { id: itemId.toString() }); + return { + type: 'text' as const, + text: JSON.stringify(item, null, 2), + }; +}; + +export const changeItemColumnValues = async ( + args: z.infer, + token: string, +) => { + const { boardId, itemId, columnValues } = args; + const item = await createClient(token).request(changeItemColumnValuesQuery, { + boardId: boardId.toString(), + itemId: itemId.toString(), + columnValues, + }); + return { + type: 'text' as const, + text: JSON.stringify(item, null, 2), + }; +}; + +export const moveItemToGroup = async ( + args: z.infer, + token: string, +) => { + const { itemId, groupId } = args; + const item = await createClient(token).request(moveItemToGroupQuery, { + itemId: itemId.toString(), + groupId: groupId.toString(), + }); + return { + type: 'text' as const, + text: JSON.stringify(item, null, 2), + }; +}; diff --git a/mcp_servers/monday/src/tools/queries.graphql.ts b/mcp_servers/monday/src/tools/queries.graphql.ts new file mode 100644 index 00000000..f9cf206a --- /dev/null +++ b/mcp_servers/monday/src/tools/queries.graphql.ts @@ -0,0 +1,469 @@ +import { gql } from 'graphql-request'; + +export const deleteItemQuery = gql` + mutation DeleteItem($id: ID!) { + delete_item(item_id: $id) { + id + } + } +`; + +export const getBoardsQuery = gql` + query getBoards { + boards { + id + name + description + workspace_id + } + } +`; + +export const getBoardItemsByNameQuery = gql` + query GetBoardItemsByName($boardId: ID!, $term: CompareValue!) { + boards(ids: [$boardId]) { + items_page( + query_params: { + rules: [{ column_id: "name", operator: contains_text, compare_value: $term }] + } + ) { + items { + id + name + column_values { + id + value + type + } + } + } + } + } +`; + +export const createItemQuery = gql` + mutation createItem($boardId: ID!, $itemName: String!, $groupId: String, $columnValues: JSON) { + create_item( + board_id: $boardId + item_name: $itemName + group_id: $groupId + column_values: $columnValues + ) { + id + name + } + } +`; + +export const createUpdateQuery = gql` + mutation createUpdate($itemId: ID!, $body: String!) { + create_update(item_id: $itemId, body: $body) { + id + } + } +`; + +export const getBoardSchemaQuery = gql` + query getBoardSchema($boardId: ID!) { + boards(ids: [$boardId]) { + groups { + id + title + } + columns { + id + type + title + settings_str + } + } + } +`; + +export const getUsersByNameQuery = gql` + query getUsersByName($name: String) { + users(name: $name) { + id + name + title + } + } +`; + +export const changeItemColumnValuesQuery = gql` + mutation changeItemColumnValues($boardId: ID!, $itemId: ID!, $columnValues: JSON!) { + change_multiple_column_values( + board_id: $boardId + item_id: $itemId + column_values: $columnValues + ) { + id + } + } +`; + +export const moveItemToGroupQuery = gql` + mutation moveItemToGroup($itemId: ID!, $groupId: String!) { + move_item_to_group(item_id: $itemId, group_id: $groupId) { + id + } + } +`; + +export const createBoardQuery = gql` + mutation createBoard( + $boardKind: BoardKind! + $boardName: String! + $boardDescription: String + $workspaceId: ID + ) { + create_board( + board_kind: $boardKind + board_name: $boardName + description: $boardDescription + workspace_id: $workspaceId + empty: true + ) { + id + } + } +`; + +export const createColumnQuery = gql` + mutation createColumn( + $boardId: ID! + $columnType: ColumnType! + $columnTitle: String! + $columnDescription: String + $columnSettings: JSON + ) { + create_column( + board_id: $boardId + column_type: $columnType + title: $columnTitle + description: $columnDescription + defaults: $columnSettings + ) { + id + } + } +`; + +export const deleteColumnQuery = gql` + mutation deleteColumn($boardId: ID!, $columnId: String!) { + delete_column(board_id: $boardId, column_id: $columnId) { + id + } + } +`; + +export const getGraphQLSchemaQuery = gql` + query getGraphQLSchema { + __schema { + queryType { + name + } + mutationType { + name + } + types { + name + kind + } + } + queryType: __type(name: "Query") { + name + fields { + name + description + type { + name + kind + ofType { + name + kind + ofType { + name + kind + } + } + } + } + } + mutationType: __type(name: "Mutation") { + name + fields { + name + description + type { + name + kind + ofType { + name + kind + ofType { + name + kind + } + } + } + } + } + } +`; + +export const introspectionQueryQuery = gql` + query IntrospectionQuery { + __schema { + queryType { + name + } + mutationType { + name + } + subscriptionType { + name + } + types { + ...FullType + } + directives { + name + description + locations + args(includeDeprecated: true) { + ...InputValue + } + } + } + } + + fragment FullType on __Type { + kind + name + description + fields(includeDeprecated: true) { + name + description + args(includeDeprecated: true) { + ...InputValue + } + type { + ...TypeRef + } + isDeprecated + deprecationReason + } + inputFields(includeDeprecated: true) { + ...InputValue + } + interfaces { + ...TypeRef + } + enumValues(includeDeprecated: true) { + name + description + isDeprecated + deprecationReason + } + possibleTypes { + ...TypeRef + } + } + + fragment InputValue on __InputValue { + name + description + type { + ...TypeRef + } + defaultValue + isDeprecated + deprecationReason + } + + fragment TypeRef on __Type { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + } + } + } + } + } + } + } + } +`; + +// it cant be a variable due to a bug in the API so must be generated string. +export const generateTypeDetailsQuery = (typeName: string) => gql` + query getTypeDetails { + __type(name: "${typeName}") { + name + description + kind + fields { + name + description + type { + name + kind + ofType { + name + kind + ofType { + name + kind + ofType { + name + kind + ofType { + name + kind + } + } + } + } + } + args { + name + description + type { + name + kind + ofType { + name + kind + ofType { + name + kind + ofType { + name + kind + } + } + } + } + defaultValue + } + } + inputFields { + name + description + type { + name + kind + ofType { + name + kind + ofType { + name + kind + ofType { + name + kind + ofType { + name + kind + } + } + } + } + } + defaultValue + } + interfaces { + name + } + enumValues { + name + description + } + possibleTypes { + name + } + } + } +`; + +export const createCustomActivityQuery = gql` + mutation createCustomActivity( + $color: CustomActivityColor! + $icon_id: CustomActivityIcon! + $name: String! + ) { + create_custom_activity(color: $color, icon_id: $icon_id, name: $name) { + color + icon_id + name + } + } +`; + +export const createTimelineItemQuery = gql` + mutation createTimelineItem( + $item_id: ID! + $custom_activity_id: String! + $title: String! + $summary: String + $content: String + $timestamp: ISO8601DateTime! + $time_range: TimelineItemTimeRange + $location: String + $phone: String + $url: String + ) { + create_timeline_item( + item_id: $item_id + custom_activity_id: $custom_activity_id + title: $title + summary: $summary + content: $content + timestamp: $timestamp + time_range: $time_range + location: $location + phone: $phone + url: $url + ) { + id + title + content + created_at + custom_activity_id + type + } + } +`; + +export const fetchCustomActivityQuery = gql` + query fetchCustomActivity { + custom_activity { + color + icon_id + id + name + type + } + } +`; diff --git a/mcp_servers/monday/src/tools/users.ts b/mcp_servers/monday/src/tools/users.ts new file mode 100644 index 00000000..9da76c9b --- /dev/null +++ b/mcp_servers/monday/src/tools/users.ts @@ -0,0 +1,16 @@ +import { z } from 'zod'; +import { createClient } from './base'; +import { getUsersByNameQuery } from './queries.graphql'; + +export const getUsersToolSchema = z.object({ + name: z.string().describe('The name or partial name of the user to get'), +}); + +export const getUsersByName = async (args: z.infer, token: string) => { + const client = createClient(token); + const users = await client.request(getUsersByNameQuery, { name: args.name }); + return { + type: 'text' as const, + text: JSON.stringify(users), + }; +}; diff --git a/mcp_servers/monday/tsconfig.json b/mcp_servers/monday/tsconfig.json new file mode 100644 index 00000000..58c4ac1e --- /dev/null +++ b/mcp_servers/monday/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ESNext", + "module": "CommonJS", + "lib": ["ESNext"], + "esModuleInterop": true, + "moduleResolution": "Node", + "outDir": "dist", + "experimentalDecorators": true, + "rootDir": "src", + "strict": true, + "strictNullChecks": true, + "types": ["node"] + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "dist"] +} diff --git a/mcp_servers/moneybird/.env.example b/mcp_servers/moneybird/.env.example new file mode 100644 index 00000000..bd5402cd --- /dev/null +++ b/mcp_servers/moneybird/.env.example @@ -0,0 +1,5 @@ +# Moneybird API Configuration +MONEYBIRD_API_TOKEN=your_moneybird_api_token_here + +# Server Configuration (Optional) +MONEYBIRD_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/moneybird/Dockerfile b/mcp_servers/moneybird/Dockerfile new file mode 100644 index 00000000..9494081e --- /dev/null +++ b/mcp_servers/moneybird/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/moneybird/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/moneybird/server.py . +COPY mcp_servers/moneybird/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/moneybird/README.md b/mcp_servers/moneybird/README.md new file mode 100644 index 00000000..7ab2622f --- /dev/null +++ b/mcp_servers/moneybird/README.md @@ -0,0 +1,78 @@ +# Moneybird MCP Server + +A Model Context Protocol (MCP) server for Moneybird integration. Manage invoices, contacts, and accounting data using Moneybird's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Moneybird with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("MONEYBIRD", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/moneybird-mcp-server:latest + + +# Run Moneybird MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/moneybird-mcp-server:latest + + +# Run Moneybird MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_moneybird_api_token_here"}' \ + ghcr.io/klavis-ai/moneybird-mcp-server:latest +``` + +**OAuth Setup:** Moneybird requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Invoice Management**: Create, read, update, and send invoices +- **Contact Management**: Manage customer and supplier information +- **Financial Reporting**: Access financial reports and accounting data +- **Payment Tracking**: Monitor payments and outstanding amounts +- **Tax Operations**: Handle tax calculations and VAT reporting + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/moneybird/requirements.txt b/mcp_servers/moneybird/requirements.txt new file mode 100644 index 00000000..2bb6d9a4 --- /dev/null +++ b/mcp_servers/moneybird/requirements.txt @@ -0,0 +1,9 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +httpx +click +starlette \ No newline at end of file diff --git a/mcp_servers/moneybird/server.py b/mcp_servers/moneybird/server.py new file mode 100644 index 00000000..35543f96 --- /dev/null +++ b/mcp_servers/moneybird/server.py @@ -0,0 +1,850 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + auth_token_context, + moneybird_list_administrations, + moneybird_list_contacts, moneybird_get_contact, moneybird_create_contact, moneybird_create_contact_person, + moneybird_list_sales_invoices, moneybird_get_sales_invoice, moneybird_create_sales_invoice, + moneybird_list_financial_accounts, moneybird_list_products, + moneybird_list_projects, moneybird_list_time_entries +) + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +MONEYBIRD_MCP_SERVER_PORT = int(os.getenv("MONEYBIRD_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + else: + auth_data = None + + if auth_data: + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + + return "" + +@click.command() +@click.option("--port", default=MONEYBIRD_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("moneybird-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # Administration + types.Tool( + name="moneybird_list_administrations", + description="List all administrations that the authenticated user has access to. This should be called first to discover available administrations before using other tools.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations(**{"category": "MONEYBIRD_ADMINISTRATION", "readOnlyHint": True}), + ), + # Contacts + types.Tool( + name="moneybird_list_contacts", + description="List all contacts in Moneybird administration.", + inputSchema={ + "type": "object", + "required": ["administration_id"], + "properties": { + "administration_id": { + "type": "string", + "description": "The ID of the Moneybird administration.", + }, + "query": { + "type": "string", + "description": "Search query to filter contacts.", + }, + "page": { + "type": "integer", + "description": "Page number for pagination.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MONEYBIRD_CONTACT", "readOnlyHint": True}), + ), + types.Tool( + name="moneybird_get_contact", + description="Get details for a specific contact by ID.", + inputSchema={ + "type": "object", + "required": ["administration_id", "contact_id"], + "properties": { + "administration_id": { + "type": "string", + "description": "The ID of the Moneybird administration.", + }, + "contact_id": { + "type": "string", + "description": "The ID of the contact to retrieve.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MONEYBIRD_CONTACT", "readOnlyHint": True}), + ), + types.Tool( + name="moneybird_create_contact", + description="Create a new company contact in Moneybird (for organizations/companies).", + inputSchema={ + "type": "object", + "required": ["administration_id", "contact_data"], + "properties": { + "administration_id": { + "type": "string", + "description": "The ID of the Moneybird administration.", + }, + "contact_data": { + "type": "object", + "description": "Contact information for company/organization.", + "properties": { + "contact": { + "type": "object", + "properties": { + "company_name": {"type": "string", "description": "Company name (required)"}, + "phone": {"type": "string"}, + "send_invoices_to_email": {"type": "string"}, + "address1": {"type": "string"}, + "city": {"type": "string"}, + "country": {"type": "string"}, + } + } + } + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MONEYBIRD_CONTACT"}), + ), + types.Tool( + name="moneybird_create_contact_person", + description="Create a new contact person within an existing company contact in Moneybird.", + inputSchema={ + "type": "object", + "required": ["administration_id", "contact_id", "contact_person_data"], + "properties": { + "administration_id": { + "type": "string", + "description": "The ID of the Moneybird administration.", + }, + "contact_id": { + "type": "string", + "description": "The ID of the existing company contact.", + }, + "contact_person_data": { + "type": "object", + "description": "Contact person information.", + "properties": { + "contact_person": { + "type": "object", + "properties": { + "firstname": {"type": "string", "description": "First name (required)"}, + "lastname": {"type": "string", "description": "Last name (required)"}, + "phone": {"type": "string"}, + "email": {"type": "string"}, + "department": {"type": "string"}, + } + } + } + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MONEYBIRD_CONTACT"}), + ), + # Sales Invoices + types.Tool( + name="moneybird_list_sales_invoices", + description="List all sales invoices in Moneybird administration.", + inputSchema={ + "type": "object", + "required": ["administration_id"], + "properties": { + "administration_id": { + "type": "string", + "description": "The ID of the Moneybird administration.", + }, + "state": { + "type": "string", + "description": "Filter by invoice state (draft, open, paid, etc.).", + }, + "period": { + "type": "string", + "description": "Filter by period (this_month, this_year, etc.).", + }, + "contact_id": { + "type": "string", + "description": "Filter by contact ID.", + }, + "created_after": { + "type": "string", + "description": "Filter invoices created after this date (YYYY-MM-DD).", + }, + "updated_after": { + "type": "string", + "description": "Filter invoices updated after this date (YYYY-MM-DD).", + }, + "page": { + "type": "integer", + "description": "Page number for pagination.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MONEYBIRD_INVOICE", "readOnlyHint": True}), + ), + types.Tool( + name="moneybird_get_sales_invoice", + description="Get details for a specific sales invoice by ID.", + inputSchema={ + "type": "object", + "required": ["administration_id", "invoice_id"], + "properties": { + "administration_id": { + "type": "string", + "description": "The ID of the Moneybird administration.", + }, + "invoice_id": { + "type": "string", + "description": "The ID of the sales invoice to retrieve.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MONEYBIRD_INVOICE", "readOnlyHint": True}), + ), + types.Tool( + name="moneybird_create_sales_invoice", + description="Create a new sales invoice in Moneybird.", + inputSchema={ + "type": "object", + "required": ["administration_id", "invoice_data"], + "properties": { + "administration_id": { + "type": "string", + "description": "The ID of the Moneybird administration.", + }, + "invoice_data": { + "type": "object", + "description": "Sales invoice data including contact_id and details.", + "properties": { + "sales_invoice": { + "type": "object", + "properties": { + "contact_id": {"type": "string"}, + "invoice_date": {"type": "string"}, + "due_date": {"type": "string"}, + "reference": {"type": "string"}, + "details_attributes": {"type": "array"} + } + } + } + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MONEYBIRD_INVOICE"}), + ), + # Financial + types.Tool( + name="moneybird_list_financial_accounts", + description="List all financial accounts in Moneybird administration.", + inputSchema={ + "type": "object", + "required": ["administration_id"], + "properties": { + "administration_id": { + "type": "string", + "description": "The ID of the Moneybird administration.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MONEYBIRD_ACCOUNT", "readOnlyHint": True}), + ), + types.Tool( + name="moneybird_list_products", + description="List all products in Moneybird administration.", + inputSchema={ + "type": "object", + "required": ["administration_id"], + "properties": { + "administration_id": { + "type": "string", + "description": "The ID of the Moneybird administration.", + }, + "query": { + "type": "string", + "description": "Search query to filter products.", + }, + "page": { + "type": "integer", + "description": "Page number for pagination.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MONEYBIRD_PRODUCT", "readOnlyHint": True}), + ), + # Projects & Time + types.Tool( + name="moneybird_list_projects", + description="List all projects in Moneybird administration.", + inputSchema={ + "type": "object", + "required": ["administration_id"], + "properties": { + "administration_id": { + "type": "string", + "description": "The ID of the Moneybird administration.", + }, + "state": { + "type": "string", + "description": "Filter by project state (active, archived, all).", + }, + "page": { + "type": "integer", + "description": "Page number for pagination.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MONEYBIRD_PROJECT", "readOnlyHint": True}), + ), + types.Tool( + name="moneybird_list_time_entries", + description="List all time entries in Moneybird administration.", + inputSchema={ + "type": "object", + "required": ["administration_id"], + "properties": { + "administration_id": { + "type": "string", + "description": "The ID of the Moneybird administration.", + }, + "period": { + "type": "string", + "description": "Filter by time period.", + }, + "contact_id": { + "type": "string", + "description": "Filter by contact ID.", + }, + "project_id": { + "type": "string", + "description": "Filter by project ID.", + }, + "user_id": { + "type": "string", + "description": "Filter by user ID.", + }, + "page": { + "type": "integer", + "description": "Page number for pagination.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MONEYBIRD_ENTRY", "readOnlyHint": True}), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + # Administration + if name == "moneybird_list_administrations": + try: + result = await moneybird_list_administrations() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Contacts + if name == "moneybird_list_contacts": + administration_id = arguments.get("administration_id") + if not administration_id: + return [ + types.TextContent( + type="text", + text="Error: administration_id parameter is required", + ) + ] + + query = arguments.get("query") + page = arguments.get("page") + + try: + result = await moneybird_list_contacts(administration_id, query, page) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "moneybird_get_contact": + administration_id = arguments.get("administration_id") + contact_id = arguments.get("contact_id") + if not administration_id or not contact_id: + return [ + types.TextContent( + type="text", + text="Error: administration_id and contact_id parameters are required", + ) + ] + + try: + result = await moneybird_get_contact(administration_id, contact_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "moneybird_create_contact": + administration_id = arguments.get("administration_id") + contact_data = arguments.get("contact_data") + if not administration_id or not contact_data: + return [ + types.TextContent( + type="text", + text="Error: administration_id and contact_data parameters are required", + ) + ] + + try: + result = await moneybird_create_contact(administration_id, contact_data) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "moneybird_create_contact_person": + administration_id = arguments.get("administration_id") + contact_id = arguments.get("contact_id") + contact_person_data = arguments.get("contact_person_data") + if not administration_id or not contact_id or not contact_person_data: + return [ + types.TextContent( + type="text", + text="Error: administration_id, contact_id, and contact_person_data parameters are required", + ) + ] + + try: + result = await moneybird_create_contact_person(administration_id, contact_id, contact_person_data) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Sales Invoices + elif name == "moneybird_list_sales_invoices": + administration_id = arguments.get("administration_id") + if not administration_id: + return [ + types.TextContent( + type="text", + text="Error: administration_id parameter is required", + ) + ] + + state = arguments.get("state") + period = arguments.get("period") + contact_id = arguments.get("contact_id") + created_after = arguments.get("created_after") + updated_after = arguments.get("updated_after") + page = arguments.get("page") + + try: + result = await moneybird_list_sales_invoices(administration_id, state, period, contact_id, created_after, updated_after, page) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "moneybird_get_sales_invoice": + administration_id = arguments.get("administration_id") + invoice_id = arguments.get("invoice_id") + if not administration_id or not invoice_id: + return [ + types.TextContent( + type="text", + text="Error: administration_id and invoice_id parameters are required", + ) + ] + + try: + result = await moneybird_get_sales_invoice(administration_id, invoice_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "moneybird_create_sales_invoice": + administration_id = arguments.get("administration_id") + invoice_data = arguments.get("invoice_data") + if not administration_id or not invoice_data: + return [ + types.TextContent( + type="text", + text="Error: administration_id and invoice_data parameters are required", + ) + ] + + try: + result = await moneybird_create_sales_invoice(administration_id, invoice_data) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Financial + elif name == "moneybird_list_financial_accounts": + administration_id = arguments.get("administration_id") + if not administration_id: + return [ + types.TextContent( + type="text", + text="Error: administration_id parameter is required", + ) + ] + + try: + result = await moneybird_list_financial_accounts(administration_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "moneybird_list_products": + administration_id = arguments.get("administration_id") + if not administration_id: + return [ + types.TextContent( + type="text", + text="Error: administration_id parameter is required", + ) + ] + + query = arguments.get("query") + page = arguments.get("page") + + try: + result = await moneybird_list_products(administration_id, query, page) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Projects & Time + elif name == "moneybird_list_projects": + administration_id = arguments.get("administration_id") + if not administration_id: + return [ + types.TextContent( + type="text", + text="Error: administration_id parameter is required", + ) + ] + + state = arguments.get("state") + page = arguments.get("page") + + try: + result = await moneybird_list_projects(administration_id, state, page) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "moneybird_list_time_entries": + administration_id = arguments.get("administration_id") + if not administration_id: + return [ + types.TextContent( + type="text", + text="Error: administration_id parameter is required", + ) + ] + + period = arguments.get("period") + contact_id = arguments.get("contact_id") + project_id = arguments.get("project_id") + user_id = arguments.get("user_id") + page = arguments.get("page") + + try: + result = await moneybird_list_time_entries(administration_id, period, contact_id, project_id, user_id, page) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + else: + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract access token from headers + access_token = extract_access_token(request) + + # Set the access token in context for this request + token = auth_token_context.set(access_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract access token from headers + access_token = extract_access_token(scope) + + # Set the access token in context for this request + token = auth_token_context.set(access_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/moneybird/tools/__init__.py b/mcp_servers/moneybird/tools/__init__.py new file mode 100644 index 00000000..dd74d1c0 --- /dev/null +++ b/mcp_servers/moneybird/tools/__init__.py @@ -0,0 +1,33 @@ +from .administration import moneybird_list_administrations +from .contacts import moneybird_list_contacts, moneybird_get_contact, moneybird_create_contact, moneybird_create_contact_person +from .sales_invoices import moneybird_list_sales_invoices, moneybird_get_sales_invoice, moneybird_create_sales_invoice +from .financial import moneybird_list_financial_accounts, moneybird_list_products +from .projects_time import moneybird_list_projects, moneybird_list_time_entries +from .base import auth_token_context + +__all__ = [ + # Administration + "moneybird_list_administrations", + + # Contacts + "moneybird_list_contacts", + "moneybird_get_contact", + "moneybird_create_contact", + "moneybird_create_contact_person", + + # Sales Invoices + "moneybird_list_sales_invoices", + "moneybird_get_sales_invoice", + "moneybird_create_sales_invoice", + + # Financial + "moneybird_list_financial_accounts", + "moneybird_list_products", + + # Projects & Time + "moneybird_list_projects", + "moneybird_list_time_entries", + + # Base + "auth_token_context", +] \ No newline at end of file diff --git a/mcp_servers/moneybird/tools/administration.py b/mcp_servers/moneybird/tools/administration.py new file mode 100644 index 00000000..f1a25a80 --- /dev/null +++ b/mcp_servers/moneybird/tools/administration.py @@ -0,0 +1,44 @@ +import logging +from typing import Any, Dict +from .base import get_auth_token, MONEYBIRD_API_ENDPOINT +import httpx + +# Configure logging +logger = logging.getLogger(__name__) + +async def moneybird_list_administrations() -> Dict[str, Any]: + """List all administrations that the authenticated user has access to.""" + logger.info("Executing tool: moneybird_list_administrations") + + api_key = get_auth_token() + + if not api_key: + raise RuntimeError("No API key provided. Please set the x-auth-token header.") + + # Administrations endpoint is at the root level, no administration_id needed + headers = { + "Authorization": f"Bearer {api_key}", + "Content-Type": "application/json" + } + + # The administrations endpoint doesn't require an administration_id + url = f"{MONEYBIRD_API_ENDPOINT}/administrations" + + try: + async with httpx.AsyncClient() as client: + response = await client.get(url, headers=headers) + response.raise_for_status() + + try: + json_response = response.json() + if json_response is None: + return {"data": None, "message": "API returned null response"} + return json_response + except ValueError as e: + logger.error(f"Failed to parse JSON response: {e}") + logger.error(f"Response content: {response.content}") + return {"error": "Invalid JSON response", "content": response.text} + + except Exception as e: + logger.exception(f"Error executing tool moneybird_list_administrations: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/moneybird/tools/base.py b/mcp_servers/moneybird/tools/base.py new file mode 100644 index 00000000..7fbb7b59 --- /dev/null +++ b/mcp_servers/moneybird/tools/base.py @@ -0,0 +1,105 @@ +import logging +import os +from typing import Any, Dict, Optional +from contextvars import ContextVar +import httpx +from dotenv import load_dotenv + +# Load env vars from .env +load_dotenv() + +# Configure logging +logger = logging.getLogger(__name__) + +MONEYBIRD_API_ENDPOINT = "https://moneybird.com/api/v2" + +# Context variable to store the API key for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + """ + Get the Moneybird API token from context or fallback to env. + """ + try: + token = auth_token_context.get() + if not token: + # Fallback to environment variable + token = os.getenv("MONEYBIRD_API_TOKEN") + logger.debug(f"Using token from environment: {bool(token)}") + if not token: + raise RuntimeError("No Moneybird auth token found in context or environment") + return token + except LookupError: + # Context variable not set at all + token = os.getenv("MONEYBIRD_API_TOKEN") + if not token: + raise RuntimeError("No Moneybird auth token found in context or environment") + return token + +class MoneybirdClient: + """Client for Moneybird API v2 using Bearer Authentication.""" + + @staticmethod + async def make_request( + method: str, + administration_id: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + """Make an HTTP request to Moneybird API.""" + api_key = get_auth_token() + + if not api_key: + raise RuntimeError("No API key provided. Please set the x-auth-token header.") + + # Moneybird uses Bearer Authentication + headers = { + "Authorization": f"Bearer {api_key}", + "Content-Type": "application/json" + } + + # Moneybird API structure: /api/v2/:administration_id/:endpoint + url = f"{MONEYBIRD_API_ENDPOINT}/{administration_id}{endpoint}" + + async with httpx.AsyncClient() as client: + if method.upper() == "GET": + response = await client.get(url, headers=headers, params=params) + elif method.upper() == "POST": + response = await client.post(url, headers=headers, json=data) + elif method.upper() == "PUT": + response = await client.put(url, headers=headers, json=data) + elif method.upper() == "PATCH": + response = await client.patch(url, headers=headers, json=data) + elif method.upper() == "DELETE": + response = await client.delete(url, headers=headers) + else: + raise ValueError(f"Unsupported HTTP method: {method}") + + response.raise_for_status() + + # Handle empty responses for DELETE operations + if response.status_code == 204 or not response.content: + return {"success": True} + + try: + json_response = response.json() + # Handle null/undefined responses + if json_response is None: + return {"data": None, "message": "API returned null response"} + return json_response + except ValueError as e: + # Handle cases where response content exists but isn't valid JSON + logger.error(f"Failed to parse JSON response: {e}") + logger.error(f"Response content: {response.content}") + return {"error": "Invalid JSON response", "content": response.text} + +async def make_request( + method: str, + administration_id: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """Make an HTTP request to Moneybird API.""" + return await MoneybirdClient.make_request(method, administration_id, endpoint, data, params) \ No newline at end of file diff --git a/mcp_servers/moneybird/tools/contacts.py b/mcp_servers/moneybird/tools/contacts.py new file mode 100644 index 00000000..586774e2 --- /dev/null +++ b/mcp_servers/moneybird/tools/contacts.py @@ -0,0 +1,66 @@ +import logging +from typing import Any, Dict, Optional, List +from .base import make_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def moneybird_list_contacts( + administration_id: str, + query: Optional[str] = None, + page: Optional[int] = None +) -> Dict[str, Any]: + """List all contacts in Moneybird.""" + logger.info("Executing tool: moneybird_list_contacts") + + params = {} + if query: + params["query"] = query + if page: + params["page"] = page + + try: + return await make_request("GET", administration_id, "/contacts", params=params) + except Exception as e: + logger.exception(f"Error executing tool moneybird_list_contacts: {e}") + raise e + +async def moneybird_get_contact( + administration_id: str, + contact_id: str +) -> Dict[str, Any]: + """Get details for a specific contact by ID.""" + logger.info(f"Executing tool: moneybird_get_contact for contact_id: {contact_id}") + + try: + return await make_request("GET", administration_id, f"/contacts/{contact_id}") + except Exception as e: + logger.exception(f"Error executing tool moneybird_get_contact: {e}") + raise e + +async def moneybird_create_contact( + administration_id: str, + contact_data: Dict[str, Any] +) -> Dict[str, Any]: + """Create a new company contact in Moneybird.""" + logger.info("Executing tool: moneybird_create_contact") + + try: + return await make_request("POST", administration_id, "/contacts", data=contact_data) + except Exception as e: + logger.exception(f"Error executing tool moneybird_create_contact: {e}") + raise e + +async def moneybird_create_contact_person( + administration_id: str, + contact_id: str, + contact_person_data: Dict[str, Any] +) -> Dict[str, Any]: + """Create a new contact person within an existing contact in Moneybird.""" + logger.info(f"Executing tool: moneybird_create_contact_person for contact_id: {contact_id}") + + try: + return await make_request("POST", administration_id, f"/contacts/{contact_id}/contact_people", data=contact_person_data) + except Exception as e: + logger.exception(f"Error executing tool moneybird_create_contact_person: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/moneybird/tools/financial.py b/mcp_servers/moneybird/tools/financial.py new file mode 100644 index 00000000..fadd4945 --- /dev/null +++ b/mcp_servers/moneybird/tools/financial.py @@ -0,0 +1,38 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def moneybird_list_financial_accounts( + administration_id: str +) -> Dict[str, Any]: + """List all financial accounts in Moneybird.""" + logger.info("Executing tool: moneybird_list_financial_accounts") + + try: + return await make_request("GET", administration_id, "/financial_accounts") + except Exception as e: + logger.exception(f"Error executing tool moneybird_list_financial_accounts: {e}") + raise e + +async def moneybird_list_products( + administration_id: str, + query: Optional[str] = None, + page: Optional[int] = None +) -> Dict[str, Any]: + """List all products in Moneybird.""" + logger.info("Executing tool: moneybird_list_products") + + params = {} + if query: + params["query"] = query + if page: + params["page"] = page + + try: + return await make_request("GET", administration_id, "/products", params=params) + except Exception as e: + logger.exception(f"Error executing tool moneybird_list_products: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/moneybird/tools/projects_time.py b/mcp_servers/moneybird/tools/projects_time.py new file mode 100644 index 00000000..f4a2a5c2 --- /dev/null +++ b/mcp_servers/moneybird/tools/projects_time.py @@ -0,0 +1,55 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def moneybird_list_projects( + administration_id: str, + state: Optional[str] = None, + page: Optional[int] = None +) -> Dict[str, Any]: + """List all projects in Moneybird.""" + logger.info("Executing tool: moneybird_list_projects") + + params = {} + if state: + params["state"] = state # active, archived, all + if page: + params["page"] = page + + try: + return await make_request("GET", administration_id, "/projects", params=params) + except Exception as e: + logger.exception(f"Error executing tool moneybird_list_projects: {e}") + raise e + +async def moneybird_list_time_entries( + administration_id: str, + period: Optional[str] = None, + contact_id: Optional[str] = None, + project_id: Optional[str] = None, + user_id: Optional[str] = None, + page: Optional[int] = None +) -> Dict[str, Any]: + """List all time entries in Moneybird.""" + logger.info("Executing tool: moneybird_list_time_entries") + + params = {} + if period: + params["period"] = period + if contact_id: + params["contact_id"] = contact_id + if project_id: + params["project_id"] = project_id + if user_id: + params["user_id"] = user_id + if page: + params["page"] = page + + try: + return await make_request("GET", administration_id, "/time_entries", params=params) + except Exception as e: + logger.exception(f"Error executing tool moneybird_list_time_entries: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/moneybird/tools/sales_invoices.py b/mcp_servers/moneybird/tools/sales_invoices.py new file mode 100644 index 00000000..13baee29 --- /dev/null +++ b/mcp_servers/moneybird/tools/sales_invoices.py @@ -0,0 +1,64 @@ +import logging +from typing import Any, Dict, Optional, List +from .base import make_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def moneybird_list_sales_invoices( + administration_id: str, + state: Optional[str] = None, + period: Optional[str] = None, + contact_id: Optional[str] = None, + created_after: Optional[str] = None, + updated_after: Optional[str] = None, + page: Optional[int] = None +) -> Dict[str, Any]: + """List all sales invoices in Moneybird.""" + logger.info("Executing tool: moneybird_list_sales_invoices") + + params = {} + if state: + params["filter"] = f"state:{state}" + if period: + params["period"] = period + if contact_id: + params["contact_id"] = contact_id + if created_after: + params["created_after"] = created_after + if updated_after: + params["updated_after"] = updated_after + if page: + params["page"] = page + + try: + return await make_request("GET", administration_id, "/sales_invoices", params=params) + except Exception as e: + logger.exception(f"Error executing tool moneybird_list_sales_invoices: {e}") + raise e + +async def moneybird_get_sales_invoice( + administration_id: str, + invoice_id: str +) -> Dict[str, Any]: + """Get details for a specific sales invoice by ID.""" + logger.info(f"Executing tool: moneybird_get_sales_invoice for invoice_id: {invoice_id}") + + try: + return await make_request("GET", administration_id, f"/sales_invoices/{invoice_id}") + except Exception as e: + logger.exception(f"Error executing tool moneybird_get_sales_invoice: {e}") + raise e + +async def moneybird_create_sales_invoice( + administration_id: str, + invoice_data: Dict[str, Any] +) -> Dict[str, Any]: + """Create a new sales invoice in Moneybird.""" + logger.info("Executing tool: moneybird_create_sales_invoice") + + try: + return await make_request("POST", administration_id, "/sales_invoices", data=invoice_data) + except Exception as e: + logger.exception(f"Error executing tool moneybird_create_sales_invoice: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/motion/.env.example b/mcp_servers/motion/.env.example new file mode 100644 index 00000000..3e8c69c9 --- /dev/null +++ b/mcp_servers/motion/.env.example @@ -0,0 +1 @@ +MOTION_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/motion/Dockerfile b/mcp_servers/motion/Dockerfile new file mode 100644 index 00000000..29601d0f --- /dev/null +++ b/mcp_servers/motion/Dockerfile @@ -0,0 +1,23 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/motion/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/motion/server.py . +COPY mcp_servers/motion/tools/ ./tools/ + +COPY mcp_servers/motion/.env.example .env + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/motion/README.md b/mcp_servers/motion/README.md new file mode 100644 index 00000000..e85a82a4 --- /dev/null +++ b/mcp_servers/motion/README.md @@ -0,0 +1,78 @@ +# Motion MCP Server + +A Model Context Protocol (MCP) server for Motion integration. Manage calendar, tasks, and scheduling using Motion's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Motion with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("MOTION", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/motion-mcp-server:latest + + +# Run Motion MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/motion-mcp-server:latest + + +# Run Motion MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_motion_api_key_here"}' \ + ghcr.io/klavis-ai/motion-mcp-server:latest +``` + +**OAuth Setup:** Motion requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Task Management**: Create, update, and manage tasks with AI scheduling +- **Calendar Integration**: Sync and manage calendar events +- **Project Planning**: Handle project timelines and deadlines +- **AI Scheduling**: Leverage Motion's AI-powered scheduling features +- **Team Collaboration**: Manage team tasks and shared projects + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/motion/requirements.txt b/mcp_servers/motion/requirements.txt new file mode 100644 index 00000000..7eda28a2 --- /dev/null +++ b/mcp_servers/motion/requirements.txt @@ -0,0 +1,10 @@ +mcp==1.11.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +requests +httpx +click +starlette \ No newline at end of file diff --git a/mcp_servers/motion/server.py b/mcp_servers/motion/server.py new file mode 100644 index 00000000..bfdf873e --- /dev/null +++ b/mcp_servers/motion/server.py @@ -0,0 +1,833 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + auth_token_context, + get_tasks, get_task, create_task, update_task, delete_task, search_tasks, + get_projects, get_project, create_project, + get_comments, create_comment, + get_users, get_my_user, + get_workspaces +) + +logger = logging.getLogger(__name__) + +load_dotenv() + +MOTION_MCP_SERVER_PORT = int(os.getenv("MOTION_MCP_SERVER_PORT", "5000")) + +def extract_api_key(request_or_scope) -> str: + """Extract API key from headers or environment.""" + api_key = os.getenv("API_KEY") + + if not api_key: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + else: + auth_data = None + + if auth_data: + try: + # Parse the JSON auth data to extract token + auth_json = json.loads(auth_data) + api_key = auth_json.get('token') or auth_json.get('api_key') or '' + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + api_key = "" + + return api_key or "" + +@click.command() +@click.option("--port", default=MOTION_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("motion-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="motion_get_workspaces", + description="Get all workspaces in the Motion account.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_WORKSPACE", "readOnlyHint": True}), + ), + types.Tool( + name="motion_get_users", + description="Get all users, optionally filtered by workspace.", + inputSchema={ + "type": "object", + "properties": { + "workspace_id": { + "type": "string", + "description": "Optional workspace ID to filter users by workspace.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_WORKSPACE", "readOnlyHint": True}), + ), + types.Tool( + name="motion_get_my_user", + description="Get current user information.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_USER", "readOnlyHint": True}), + ), + types.Tool( + name="motion_get_tasks", + description="Get tasks, optionally filtered by workspace.", + inputSchema={ + "type": "object", + "properties": { + "workspace_id": { + "type": "string", + "description": "Optional workspace ID to filter tasks by workspace.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_TASK", "readOnlyHint": True}), + ), + types.Tool( + name="motion_get_task", + description="Get a specific task by its ID.", + inputSchema={ + "type": "object", + "required": ["task_id"], + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to retrieve.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_TASK", "readOnlyHint": True}), + ), + types.Tool( + name="motion_create_task", + description="Create a new task in Motion.", + inputSchema={ + "type": "object", + "required": ["name", "workspace_id"], + "properties": { + "name": { + "type": "string", + "description": "The name of the task.", + }, + "workspace_id": { + "type": "string", + "description": "The ID of the workspace to create the task in.", + }, + "description": { + "type": "string", + "description": "The description of the task.", + }, + "status": { + "type": "string", + "description": "The status of the task.", + }, + "priority": { + "type": "string", + "description": "The priority of the task.", + }, + "assignee_id": { + "type": "string", + "description": "The ID of the user to assign the task to.", + }, + "project_id": { + "type": "string", + "description": "The ID of the project to assign the task to.", + }, + "due_date": { + "type": "string", + "description": "The due date for the task (ISO date string).", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_TASK"}), + ), + types.Tool( + name="motion_update_task", + description="Update an existing task in Motion.", + inputSchema={ + "type": "object", + "required": ["task_id"], + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to update.", + }, + "name": { + "type": "string", + "description": "The new name of the task.", + }, + "description": { + "type": "string", + "description": "The new description of the task.", + }, + "status": { + "type": "string", + "description": "The new status of the task.", + }, + "priority": { + "type": "string", + "description": "The new priority of the task.", + }, + "assignee_id": { + "type": "string", + "description": "The ID of the user to assign the task to.", + }, + "project_id": { + "type": "string", + "description": "The ID of the project to assign the task to.", + }, + "due_date": { + "type": "string", + "description": "The new due date for the task (ISO date string).", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_TASK"}), + ), + types.Tool( + name="motion_delete_task", + description="Delete a task from Motion.", + inputSchema={ + "type": "object", + "required": ["task_id"], + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to delete.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_TASK"}), + ), + types.Tool( + name="motion_search_tasks", + description="Search for tasks by name or description.", + inputSchema={ + "type": "object", + "required": ["query"], + "properties": { + "query": { + "type": "string", + "description": "The search query to find tasks.", + }, + "workspace_id": { + "type": "string", + "description": "Optional workspace ID to limit search to specific workspace.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_TASK"}), + ), + + types.Tool( + name="motion_get_projects", + description="Get projects, optionally filtered by workspace.", + inputSchema={ + "type": "object", + "properties": { + "workspace_id": { + "type": "string", + "description": "Optional workspace ID to filter projects by workspace.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_PROJECT", "readOnlyHint": True}), + ), + types.Tool( + name="motion_get_project", + description="Get a specific project by its ID.", + inputSchema={ + "type": "object", + "required": ["project_id"], + "properties": { + "project_id": { + "type": "string", + "description": "The ID of the project to retrieve.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_PROJECT", "readOnlyHint": True}), + ), + types.Tool( + name="motion_create_project", + description="Create a new project in Motion.", + inputSchema={ + "type": "object", + "required": ["name", "workspace_id"], + "properties": { + "name": { + "type": "string", + "description": "The name of the project.", + }, + "workspace_id": { + "type": "string", + "description": "The ID of the workspace to create the project in.", + }, + "description": { + "type": "string", + "description": "The description of the project.", + }, + "status": { + "type": "string", + "description": "The status of the project.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_PROJECT"}), + ), + types.Tool( + name="motion_get_comments", + description="Get comments for a specific task.", + inputSchema={ + "type": "object", + "required": ["task_id"], + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to get comments for.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_COMMENT", "readOnlyHint": True}), + ), + types.Tool( + name="motion_create_comment", + description="Create a comment on a task.", + inputSchema={ + "type": "object", + "required": ["task_id", "content"], + "properties": { + "task_id": { + "type": "string", + "description": "The ID of the task to comment on.", + }, + "content": { + "type": "string", + "description": "The content of the comment.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MOTION_COMMENT"}), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + if name == "motion_get_workspaces": + try: + result = await get_workspaces() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "motion_get_users": + workspace_id = arguments.get("workspace_id") + try: + result = await get_users(workspace_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "motion_get_my_user": + try: + result = await get_my_user() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "motion_get_tasks": + workspace_id = arguments.get("workspace_id") + try: + result = await get_tasks(workspace_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "motion_get_task": + task_id = arguments.get("task_id") + if not task_id: + return [ + types.TextContent( + type="text", + text="Error: task_id parameter is required", + ) + ] + try: + result = await get_task(task_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "motion_create_task": + name_param = arguments.get("name") + workspace_id = arguments.get("workspace_id") + if not name_param or not workspace_id: + return [ + types.TextContent( + type="text", + text="Error: name and workspace_id parameters are required", + ) + ] + + description = arguments.get("description") + status = arguments.get("status") + priority = arguments.get("priority") + assignee_id = arguments.get("assignee_id") + project_id = arguments.get("project_id") + due_date = arguments.get("due_date") + + try: + result = await create_task( + name_param, workspace_id, description, status, + priority, assignee_id, project_id, due_date + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "motion_update_task": + task_id = arguments.get("task_id") + if not task_id: + return [ + types.TextContent( + type="text", + text="Error: task_id parameter is required", + ) + ] + + name_param = arguments.get("name") + description = arguments.get("description") + status = arguments.get("status") + priority = arguments.get("priority") + assignee_id = arguments.get("assignee_id") + project_id = arguments.get("project_id") + due_date = arguments.get("due_date") + + try: + result = await update_task( + task_id, name_param, description, status, + priority, assignee_id, project_id, due_date + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "motion_delete_task": + task_id = arguments.get("task_id") + if not task_id: + return [ + types.TextContent( + type="text", + text="Error: task_id parameter is required", + ) + ] + try: + result = await delete_task(task_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "motion_search_tasks": + query = arguments.get("query") + if not query: + return [ + types.TextContent( + type="text", + text="Error: query parameter is required", + ) + ] + + workspace_id = arguments.get("workspace_id") + try: + result = await search_tasks(query, workspace_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + + elif name == "motion_get_projects": + workspace_id = arguments.get("workspace_id") + try: + result = await get_projects(workspace_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "motion_get_project": + project_id = arguments.get("project_id") + if not project_id: + return [ + types.TextContent( + type="text", + text="Error: project_id parameter is required", + ) + ] + try: + result = await get_project(project_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "motion_create_project": + name_param = arguments.get("name") + workspace_id = arguments.get("workspace_id") + if not name_param or not workspace_id: + return [ + types.TextContent( + type="text", + text="Error: name and workspace_id parameters are required", + ) + ] + + description = arguments.get("description") + status = arguments.get("status") + + try: + result = await create_project(name_param, workspace_id, description, status) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "motion_get_comments": + task_id = arguments.get("task_id") + if not task_id: + return [ + types.TextContent( + type="text", + text="Error: task_id parameter is required", + ) + ] + try: + result = await get_comments(task_id) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "motion_create_comment": + task_id = arguments.get("task_id") + content = arguments.get("content") + if not task_id or not content: + return [ + types.TextContent( + type="text", + text="Error: task_id and content parameters are required", + ) + ] + try: + result = await create_comment(task_id, content) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + else: + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + """Handle SSE-based MCP connections.""" + logger.info("Handling SSE connection") + + # Extract API key from headers + api_key = extract_api_key(request) + + # Set the API key in context for this request + token = auth_token_context.set(api_key or "") + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + """Handle StreamableHTTP-based MCP connections.""" + logger.info("Handling StreamableHTTP request") + + # Extract API key from headers + api_key = extract_api_key(scope) + + # Fallback to Authorization header for compatibility + if not api_key: + headers = dict(scope.get("headers", [])) + auth_header = headers.get(b"authorization", b"").decode() + if auth_header.startswith("Bearer "): + api_key = auth_header[7:] # Remove "Bearer " prefix + + # Set the API key in context for this request + token = auth_token_context.set(api_key or "") + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + exit(main()) \ No newline at end of file diff --git a/mcp_servers/motion/tools/__init__.py b/mcp_servers/motion/tools/__init__.py new file mode 100644 index 00000000..b4ee5cea --- /dev/null +++ b/mcp_servers/motion/tools/__init__.py @@ -0,0 +1,38 @@ +# Motion MCP Server Tools +# This package contains all the tool implementations organized by object type + +from .tasks import get_tasks, get_task, create_task, update_task, delete_task, search_tasks +from .projects import get_projects, get_project, create_project +from .comments import get_comments, create_comment +from .users import get_users, get_my_user +from .workspaces import get_workspaces +from .base import auth_token_context + +__all__ = [ + # Tasks + "get_tasks", + "get_task", + "create_task", + "update_task", + "delete_task", + "search_tasks", + + # Projects + "get_projects", + "get_project", + "create_project", + + # Comments + "get_comments", + "create_comment", + + # Users + "get_users", + "get_my_user", + + # Workspaces + "get_workspaces", + + # Base + "auth_token_context", +] \ No newline at end of file diff --git a/mcp_servers/motion/tools/base.py b/mcp_servers/motion/tools/base.py new file mode 100644 index 00000000..21a8d181 --- /dev/null +++ b/mcp_servers/motion/tools/base.py @@ -0,0 +1,45 @@ +import logging +from typing import Any, Dict +from contextvars import ContextVar +import httpx + +# Configure logging +logger = logging.getLogger(__name__) + +MOTION_API_ENDPOINT = "https://api.usemotion.com/v1" + +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("Authentication token not found in request context") + +async def make_api_request(endpoint: str, method: str = "GET", data: Dict[str, Any] = None) -> Dict[str, Any]: + """Make a REST API request to Motion API.""" + access_token = get_auth_token() + + headers = { + "X-API-Key": access_token, + "Content-Type": "application/json" + } + + url = f"{MOTION_API_ENDPOINT}{endpoint}" + + async with httpx.AsyncClient() as client: + if method.upper() == "GET": + response = await client.get(url, headers=headers) + elif method.upper() == "POST": + response = await client.post(url, json=data, headers=headers) + elif method.upper() == "PATCH": + response = await client.patch(url, json=data, headers=headers) + elif method.upper() == "DELETE": + response = await client.delete(url, headers=headers) + else: + raise ValueError(f"Unsupported HTTP method: {method}") + + response.raise_for_status() + return response.json() \ No newline at end of file diff --git a/mcp_servers/motion/tools/comments.py b/mcp_servers/motion/tools/comments.py new file mode 100644 index 00000000..d6f0e37a --- /dev/null +++ b/mcp_servers/motion/tools/comments.py @@ -0,0 +1,29 @@ +import logging +from typing import Any, Dict +from .base import make_api_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_comments(task_id: str) -> Dict[str, Any]: + """Get comments for a specific task.""" + logger.info(f"Executing tool: get_comments for task_id={task_id}") + try: + endpoint = f"/tasks/{task_id}/comments" + return await make_api_request(endpoint) + except Exception as e: + logger.exception(f"Error executing tool get_comments: {e}") + raise e + +async def create_comment(task_id: str, content: str) -> Dict[str, Any]: + """Create a comment on a task.""" + logger.info(f"Executing tool: create_comment for task_id={task_id}") + try: + data = { + "content": content + } + endpoint = f"/tasks/{task_id}/comments" + return await make_api_request(endpoint, method="POST", data=data) + except Exception as e: + logger.exception(f"Error executing tool create_comment: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/motion/tools/projects.py b/mcp_servers/motion/tools/projects.py new file mode 100644 index 00000000..97103829 --- /dev/null +++ b/mcp_servers/motion/tools/projects.py @@ -0,0 +1,53 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_api_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_projects(workspace_id: Optional[str] = None) -> Dict[str, Any]: + """Get all projects.""" + logger.info("Executing tool: get_projects") + try: + endpoint = "/projects" + if workspace_id: + endpoint += f"?workspaceId={workspace_id}" + + return await make_api_request(endpoint) + except Exception as e: + logger.exception(f"Error executing tool get_projects: {e}") + raise e + +async def get_project(project_id: str) -> Dict[str, Any]: + """Get a specific project by ID.""" + logger.info(f"Executing tool: get_project with project_id={project_id}") + try: + endpoint = f"/projects/{project_id}" + return await make_api_request(endpoint) + except Exception as e: + logger.exception(f"Error executing tool get_project: {e}") + raise e + +async def create_project( + name: str, + workspace_id: str, + description: Optional[str] = None, + status: Optional[str] = None +) -> Dict[str, Any]: + """Create a new project.""" + logger.info(f"Executing tool: create_project with name={name}") + try: + data = { + "name": name, + "workspaceId": workspace_id + } + + if description: + data["description"] = description + if status: + data["status"] = status + + return await make_api_request("/projects", method="POST", data=data) + except Exception as e: + logger.exception(f"Error executing tool create_project: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/motion/tools/tasks.py b/mcp_servers/motion/tools/tasks.py new file mode 100644 index 00000000..7a078afa --- /dev/null +++ b/mcp_servers/motion/tools/tasks.py @@ -0,0 +1,130 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_api_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_tasks(workspace_id: Optional[str] = None) -> Dict[str, Any]: + """Get all tasks.""" + logger.info("Executing tool: get_tasks") + try: + endpoint = "/tasks" + params = {} + if workspace_id: + params["workspaceId"] = workspace_id + + if params: + query_string = "&".join([f"{k}={v}" for k, v in params.items()]) + endpoint += f"?{query_string}" + + return await make_api_request(endpoint) + except Exception as e: + logger.exception(f"Error executing tool get_tasks: {e}") + raise e + +async def get_task(task_id: str) -> Dict[str, Any]: + """Get a specific task by ID.""" + logger.info(f"Executing tool: get_task with task_id={task_id}") + try: + endpoint = f"/tasks/{task_id}" + return await make_api_request(endpoint) + except Exception as e: + logger.exception(f"Error executing tool get_task: {e}") + raise e + +async def create_task( + name: str, + workspace_id: str, + description: Optional[str] = None, + status: Optional[str] = None, + priority: Optional[str] = None, + assignee_id: Optional[str] = None, + project_id: Optional[str] = None, + due_date: Optional[str] = None +) -> Dict[str, Any]: + """Create a new task.""" + logger.info(f"Executing tool: create_task with name={name}") + try: + data = { + "name": name, + "workspaceId": workspace_id + } + + if description: + data["description"] = description + if status: + data["status"] = status + if priority: + data["priority"] = priority + if assignee_id: + data["assigneeId"] = assignee_id + if project_id: + data["projectId"] = project_id + if due_date: + data["dueDate"] = due_date + + return await make_api_request("/tasks", method="POST", data=data) + except Exception as e: + logger.exception(f"Error executing tool create_task: {e}") + raise e + +async def update_task( + task_id: str, + name: Optional[str] = None, + description: Optional[str] = None, + status: Optional[str] = None, + priority: Optional[str] = None, + assignee_id: Optional[str] = None, + project_id: Optional[str] = None, + due_date: Optional[str] = None +) -> Dict[str, Any]: + """Update an existing task.""" + logger.info(f"Executing tool: update_task with task_id={task_id}") + try: + data = {} + + if name is not None: + data["name"] = name + if description is not None: + data["description"] = description + if status is not None: + data["status"] = status + if priority is not None: + data["priority"] = priority + if assignee_id is not None: + data["assigneeId"] = assignee_id + if project_id is not None: + data["projectId"] = project_id + if due_date is not None: + data["dueDate"] = due_date + + return await make_api_request(f"/tasks/{task_id}", method="PATCH", data=data) + except Exception as e: + logger.exception(f"Error executing tool update_task: {e}") + raise e + +async def delete_task(task_id: str) -> Dict[str, Any]: + """Delete a task.""" + logger.info(f"Executing tool: delete_task with task_id={task_id}") + try: + await make_api_request(f"/tasks/{task_id}", method="DELETE") + return {"success": True, "message": f"Task {task_id} deleted successfully"} + except Exception as e: + logger.exception(f"Error executing tool delete_task: {e}") + raise e + +async def search_tasks(query: str, workspace_id: Optional[str] = None) -> Dict[str, Any]: + """Search tasks by name or description.""" + logger.info(f"Executing tool: search_tasks with query={query}") + try: + endpoint = f"/tasks?search={query}" + if workspace_id: + endpoint += f"&workspaceId={workspace_id}" + + return await make_api_request(endpoint) + except Exception as e: + logger.exception(f"Error executing tool search_tasks: {e}") + raise e + + \ No newline at end of file diff --git a/mcp_servers/motion/tools/users.py b/mcp_servers/motion/tools/users.py new file mode 100644 index 00000000..6a1abcb3 --- /dev/null +++ b/mcp_servers/motion/tools/users.py @@ -0,0 +1,29 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_api_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_users(workspace_id: Optional[str] = None) -> Dict[str, Any]: + """Get all users.""" + logger.info("Executing tool: get_users") + try: + endpoint = "/users" + if workspace_id: + endpoint += f"?workspaceId={workspace_id}" + + return await make_api_request(endpoint) + except Exception as e: + logger.exception(f"Error executing tool get_users: {e}") + raise e + +async def get_my_user() -> Dict[str, Any]: + """Get current user information.""" + logger.info("Executing tool: get_my_user") + try: + endpoint = "/users/me" + return await make_api_request(endpoint) + except Exception as e: + logger.exception(f"Error executing tool get_my_user: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/motion/tools/workspaces.py b/mcp_servers/motion/tools/workspaces.py new file mode 100644 index 00000000..84764c80 --- /dev/null +++ b/mcp_servers/motion/tools/workspaces.py @@ -0,0 +1,16 @@ +import logging +from typing import Any, Dict +from .base import make_api_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_workspaces() -> Dict[str, Any]: + """Get all workspaces.""" + logger.info("Executing tool: get_workspaces") + try: + endpoint = "/workspaces" + return await make_api_request(endpoint) + except Exception as e: + logger.exception(f"Error executing tool get_workspaces: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/notion/.env.example b/mcp_servers/notion/.env.example new file mode 100644 index 00000000..35a0ad3c --- /dev/null +++ b/mcp_servers/notion/.env.example @@ -0,0 +1,2 @@ +# Port for the MCP server to listen on +NOTION_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/notion/Dockerfile b/mcp_servers/notion/Dockerfile new file mode 100644 index 00000000..f3d48fef --- /dev/null +++ b/mcp_servers/notion/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +COPY mcp_servers/notion/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/notion/server.py . +COPY mcp_servers/notion/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/notion/README.md b/mcp_servers/notion/README.md new file mode 100644 index 00000000..b6149622 --- /dev/null +++ b/mcp_servers/notion/README.md @@ -0,0 +1,78 @@ +# Notion MCP Server + +A Model Context Protocol (MCP) server for Notion integration. Create, read, and manage Notion pages, databases, and content using Notion's API with full OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Notion with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("NOTION", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/notion-mcp-server:latest + + +# Run Notion MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/notion-mcp-server:latest + + +# Run Notion MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_notion_token_here"}' \ + ghcr.io/klavis-ai/notion-mcp-server:latest +``` + +**OAuth Setup:** Notion requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Page Management**: Create, read, update, and delete Notion pages +- **Database Operations**: Query databases, add records, update properties +- **Content Editing**: Manage blocks, text, and rich content +- **Search**: Search across pages and databases +- **User Management**: Get user information and workspace details + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/notion/requirements.txt b/mcp_servers/notion/requirements.txt new file mode 100644 index 00000000..fbd0545b --- /dev/null +++ b/mcp_servers/notion/requirements.txt @@ -0,0 +1,9 @@ +mcp==1.11.0 +pydantic +typing-extensions +aiohttp +click +python-dotenv +starlette +uvicorn[standard] +notion-client \ No newline at end of file diff --git a/mcp_servers/notion/server.py b/mcp_servers/notion/server.py new file mode 100644 index 00000000..f078f85f --- /dev/null +++ b/mcp_servers/notion/server.py @@ -0,0 +1,1207 @@ +import contextlib +import json +import logging +import os +from collections.abc import AsyncIterator +from typing import Any, Dict +import base64 + +import click +import mcp.types as types +from dotenv import load_dotenv +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +from tools import ( + auth_token_context, + create_page, + get_page, + update_page_properties, + retrieve_page_property, + query_database, + get_database, + create_database, + update_database, + create_database_item, + search_notion, + get_user, + list_users, + get_me, + create_comment, + get_comments, + retrieve_block, + update_block, + delete_block, + get_block_children, + append_block_children, +) + +load_dotenv() + +# Configure logging +logger = logging.getLogger(__name__) + +NOTION_MCP_SERVER_PORT = int(os.getenv("NOTION_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + header_value = request_or_scope.headers.get(b'x-auth-data') + if header_value: + auth_data = base64.b64decode(header_value).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + header_value = headers.get(b'x-auth-data') + if header_value: + auth_data = base64.b64decode(header_value).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + + +@click.command() +@click.option( + "--port", default=NOTION_MCP_SERVER_PORT, help="Port to listen on for HTTP" +) +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("notion-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="notion_create_page", + description="Create a new page in Notion. If parent is not specified, a private page will be created in the workspace", + inputSchema={ + "type": "object", + "properties": { + "page": { + "type": "object", + "description": "Page configuration", + "properties": { + "content": { + "type": "string", + "description": "Markdown content for the page", + }, + "properties": { + "type": "object", + "description": "Page properties", + "properties": { + "title": { + "type": "string", + "description": "Page title", + } + } + }, + }, + }, + "parent": { + "type": "object", + "description": "Optional parent object with page_id, database_id, or workspace. If not specified, a private page will be created", + "properties": { + "page_id": { + "type": "string", + "description": "Parent page ID", + }, + "database_id": { + "type": "string", + "description": "Parent database ID", + }, + "workspace": { + "type": "boolean", + "description": "Whether parent is workspace", + }, + }, + }, + "icon": { + "type": "object", + "description": "Optional page icon", + }, + "cover": { + "type": "object", + "description": "Optional page cover", + }, + }, + "required": ["page"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_PAGE"} + ), + ), + types.Tool( + name="notion_get_page", + description="Retrieve a page from Notion", + inputSchema={ + "type": "object", + "properties": { + "page_id": { + "type": "string", + "description": "ID of the page to retrieve", + }, + "filter_properties": { + "type": "array", + "items": {"type": "string"}, + "description": "List of property IDs to limit the response", + }, + }, + "required": ["page_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_PAGE", "readOnlyHint": True} + ), + ), + types.Tool( + name="notion_update_page_properties", + description="Update properties of a page", + inputSchema={ + "type": "object", + "properties": { + "page_id": { + "type": "string", + "description": "ID of the page to update", + }, + "properties": { + "type": "object", + "description": "Properties to update", + }, + "icon": { + "type": "object", + "description": "Optional new icon", + }, + "cover": { + "type": "object", + "description": "Optional new cover", + }, + "archived": { + "type": "boolean", + "description": "Whether to archive the page", + }, + "in_trash": { + "type": "boolean", + "description": "Whether to move the page to trash", + }, + }, + "required": ["page_id", "properties"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_PAGE"} + ), + ), + types.Tool( + name="notion_query_database", + description="Query a database in Notion", + inputSchema={ + "type": "object", + "properties": { + "database_id": { + "type": "string", + "description": "ID of the database to query", + }, + "filter": { + "type": "object", + "description": "Optional filter conditions", + }, + "sorts": { + "type": "array", + "items": {"type": "object"}, + "description": "Optional sort conditions", + }, + "start_cursor": { + "type": "string", + "description": "Cursor for pagination", + }, + "page_size": { + "type": "integer", + "description": "Number of results to return (max 100)", + }, + "filter_properties": { + "type": "array", + "items": {"type": "string"}, + "description": "List of property IDs to limit the response", + }, + "archived": { + "type": "boolean", + "description": "Whether to include archived pages", + }, + "in_trash": { + "type": "boolean", + "description": "Whether to include pages in trash", + }, + }, + "required": ["database_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_DATABASE", "readOnlyHint": True} + ), + ), + types.Tool( + name="notion_get_database", + description="Retrieve a database from Notion", + inputSchema={ + "type": "object", + "properties": { + "database_id": { + "type": "string", + "description": "ID of the database to retrieve", + }, + }, + "required": ["database_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_DATABASE", "readOnlyHint": True} + ), + ), + types.Tool( + name="notion_create_database", + description="Create a new database in Notion", + inputSchema={ + "type": "object", + "properties": { + "parent": { + "type": "object", + "description": "Parent page object", + }, + "title": { + "type": "array", + "items": {"type": "object"}, + "description": "Database title as rich text array", + }, + "properties": { + "type": "object", + "description": "Database properties schema", + }, + "icon": { + "type": "object", + "description": "Optional database icon", + }, + "cover": { + "type": "object", + "description": "Optional database cover", + }, + "description": { + "type": "array", + "items": {"type": "object"}, + "description": "Optional description as rich text array", + }, + }, + "required": ["parent", "title", "properties"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_DATABASE"} + ), + ), + types.Tool( + name="notion_update_database", + description="Update a database in Notion", + inputSchema={ + "type": "object", + "properties": { + "database_id": { + "type": "string", + "description": "ID of the database to update", + }, + "title": { + "type": "array", + "items": {"type": "object"}, + "description": "Optional new title", + }, + "description": { + "type": "array", + "items": {"type": "object"}, + "description": "Optional new description", + }, + "properties": { + "type": "object", + "description": "Optional updated properties schema", + }, + "icon": { + "type": "object", + "description": "Optional new icon", + }, + "cover": { + "type": "object", + "description": "Optional new cover", + }, + "archived": { + "type": "boolean", + "description": "Whether to archive the database", + }, + }, + "required": ["database_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_DATABASE"} + ), + ), + types.Tool( + name="notion_create_database_item", + description="Create a new item (page) in a database", + inputSchema={ + "type": "object", + "properties": { + "database_id": { + "type": "string", + "description": "ID of the database", + }, + "properties": { + "type": "object", + "description": "Item properties", + }, + "children": { + "type": "array", + "items": {"type": "object"}, + "description": "Optional array of block objects", + }, + "icon": { + "type": "object", + "description": "Optional item icon", + }, + "cover": { + "type": "object", + "description": "Optional item cover", + }, + }, + "required": ["database_id", "properties"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_DATABASE"} + ), + ), + + types.Tool( + name="notion_search", + description="Search for pages and databases in Notion", + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Search query", + }, + "sort": { + "type": "object", + "description": "Optional sort criteria", + }, + "filter": { + "type": "object", + "description": "Optional filter conditions", + }, + "start_cursor": { + "type": "string", + "description": "Cursor for pagination", + }, + "page_size": { + "type": "integer", + "description": "Number of results to return (max 100)", + }, + }, + "required": [], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_SEARCH", "readOnlyHint": True} + ), + ), + types.Tool( + name="notion_get_user", + description="Retrieve a user from Notion", + inputSchema={ + "type": "object", + "properties": { + "user_id": { + "type": "string", + "description": "ID of the user to retrieve", + }, + }, + "required": ["user_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_USER", "readOnlyHint": True} + ), + ), + types.Tool( + name="notion_list_users", + description="List all users in the workspace", + inputSchema={ + "type": "object", + "properties": { + "start_cursor": { + "type": "string", + "description": "Cursor for pagination", + }, + "page_size": { + "type": "integer", + "description": "Number of results to return (max 100)", + }, + }, + "required": [], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_USER", "readOnlyHint": True} + ), + ), + + types.Tool( + name="notion_create_comment", + description="Create a comment on a page or discussion", + inputSchema={ + "type": "object", + "properties": { + "parent": { + "type": "object", + "description": "Parent object (page_id)", + }, + "rich_text": { + "type": "array", + "items": {"type": "object"}, + "description": "Comment content as rich text array", + }, + "discussion_id": { + "type": "string", + "description": "Optional discussion thread ID", + }, + }, + "required": ["rich_text"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_COMMENT"} + ), + ), + types.Tool( + name="notion_get_comments", + description="Retrieve comments from a page or block", + inputSchema={ + "type": "object", + "properties": { + "block_id": { + "type": "string", + "description": "ID of the block or page", + }, + "start_cursor": { + "type": "string", + "description": "Cursor for pagination", + }, + "page_size": { + "type": "integer", + "description": "Number of results to return (max 100)", + }, + }, + "required": ["block_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_COMMENT", "readOnlyHint": True} + ), + ), + types.Tool( + name="notion_get_me", + description="Retrieve your token's bot user information", + inputSchema={ + "type": "object", + "properties": {}, + "required": [], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_USER", "readOnlyHint": True} + ), + ), + types.Tool( + name="notion_retrieve_page_property", + description="Retrieve a specific property from a page", + inputSchema={ + "type": "object", + "properties": { + "page_id": { + "type": "string", + "description": "ID of the page", + }, + "property_id": { + "type": "string", + "description": "ID of the property to retrieve", + }, + "start_cursor": { + "type": "string", + "description": "Cursor for pagination", + }, + "page_size": { + "type": "integer", + "description": "Number of results to return (max 100)", + }, + }, + "required": ["page_id", "property_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_PAGE", "readOnlyHint": True} + ), + ), + types.Tool( + name="notion_retrieve_block", + description="Retrieve a block from Notion", + inputSchema={ + "type": "object", + "properties": { + "block_id": { + "type": "string", + "description": "ID of the block to retrieve", + }, + }, + "required": ["block_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_BLOCK", "readOnlyHint": True} + ), + ), + types.Tool( + name="notion_update_block", + description="Update a block in Notion", + inputSchema={ + "type": "object", + "properties": { + "block_id": { + "type": "string", + "description": "ID of the block to update", + }, + "block_type": { + "type": "object", + "description": "Block type and properties to update", + }, + "archived": { + "type": "boolean", + "description": "Whether to archive the block", + }, + }, + "required": ["block_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_BLOCK"} + ), + ), + types.Tool( + name="notion_delete_block", + description="Delete a block from Notion", + inputSchema={ + "type": "object", + "properties": { + "block_id": { + "type": "string", + "description": "ID of the block to delete", + }, + }, + "required": ["block_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_BLOCK"} + ), + ), + types.Tool( + name="notion_get_block_children", + description="Retrieve children of a block", + inputSchema={ + "type": "object", + "properties": { + "block_id": { + "type": "string", + "description": "ID of the parent block", + }, + "start_cursor": { + "type": "string", + "description": "Cursor for pagination", + }, + "page_size": { + "type": "integer", + "description": "Number of results to return (max 100)", + }, + }, + "required": ["block_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_BLOCK", "readOnlyHint": True} + ), + ), + types.Tool( + name="notion_append_block_children", + description="Append block children to a container block", + inputSchema={ + "type": "object", + "properties": { + "block_id": { + "type": "string", + "description": "ID of the parent block", + }, + "children": { + "type": "array", + "items": {"type": "object"}, + "description": "Array of block objects to append", + }, + "after": { + "type": "string", + "description": "ID of the existing block to append after", + }, + }, + "required": ["block_id", "children"], + }, + annotations=types.ToolAnnotations( + **{"category": "NOTION_BLOCK"} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + # Log the tool call with name and arguments + logger.info(f"Tool called: {name}") + logger.debug(f"Tool arguments: {json.dumps(arguments, indent=2)}") + + if name == "notion_create_page": + try: + result = await create_page( + page=arguments.get("page"), + parent=arguments.get("parent"), + properties=arguments.get("properties"), # Support old format + children=arguments.get("children"), # Support old format + icon=arguments.get("icon"), + cover=arguments.get("cover"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_get_page": + try: + result = await get_page( + page_id=arguments.get("page_id"), + filter_properties=arguments.get("filter_properties"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_update_page_properties": + try: + result = await update_page_properties( + page_id=arguments.get("page_id"), + properties=arguments.get("properties"), + icon=arguments.get("icon"), + cover=arguments.get("cover"), + archived=arguments.get("archived"), + in_trash=arguments.get("in_trash"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_query_database": + try: + result = await query_database( + database_id=arguments.get("database_id"), + filter_conditions=arguments.get("filter"), + sorts=arguments.get("sorts"), + start_cursor=arguments.get("start_cursor"), + page_size=arguments.get("page_size"), + filter_properties=arguments.get("filter_properties"), + archived=arguments.get("archived"), + in_trash=arguments.get("in_trash"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_get_database": + try: + result = await get_database(database_id=arguments.get("database_id")) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_create_database": + try: + result = await create_database( + parent=arguments.get("parent"), + title=arguments.get("title"), + properties=arguments.get("properties"), + icon=arguments.get("icon"), + cover=arguments.get("cover"), + description=arguments.get("description"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_update_database": + try: + result = await update_database( + database_id=arguments.get("database_id"), + title=arguments.get("title"), + description=arguments.get("description"), + properties=arguments.get("properties"), + icon=arguments.get("icon"), + cover=arguments.get("cover"), + archived=arguments.get("archived"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_create_database_item": + try: + result = await create_database_item( + database_id=arguments.get("database_id"), + properties=arguments.get("properties"), + children=arguments.get("children"), + icon=arguments.get("icon"), + cover=arguments.get("cover"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "notion_search": + try: + result = await search_notion( + query=arguments.get("query"), + sort=arguments.get("sort"), + filter_conditions=arguments.get("filter"), + start_cursor=arguments.get("start_cursor"), + page_size=arguments.get("page_size"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_get_user": + try: + result = await get_user(user_id=arguments.get("user_id")) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_list_users": + try: + result = await list_users( + start_cursor=arguments.get("start_cursor"), + page_size=arguments.get("page_size"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "notion_create_comment": + try: + result = await create_comment( + parent=arguments.get("parent"), + rich_text=arguments.get("rich_text"), + discussion_id=arguments.get("discussion_id"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_get_comments": + try: + result = await get_comments( + block_id=arguments.get("block_id"), + start_cursor=arguments.get("start_cursor"), + page_size=arguments.get("page_size"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_get_me": + try: + result = await get_me() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_retrieve_page_property": + try: + result = await retrieve_page_property( + page_id=arguments.get("page_id"), + property_id=arguments.get("property_id"), + start_cursor=arguments.get("start_cursor"), + page_size=arguments.get("page_size"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_retrieve_block": + try: + result = await retrieve_block(block_id=arguments.get("block_id")) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_update_block": + try: + result = await update_block( + block_id=arguments.get("block_id"), + block_type=arguments.get("block_type"), + archived=arguments.get("archived"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_delete_block": + try: + result = await delete_block(block_id=arguments.get("block_id")) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_get_block_children": + try: + result = await get_block_children( + block_id=arguments.get("block_id"), + start_cursor=arguments.get("start_cursor"), + page_size=arguments.get("page_size"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "notion_append_block_children": + try: + result = await append_block_children( + block_id=arguments.get("block_id"), + children=arguments.get("children"), + after=arguments.get("after"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/notion/tools/__init__.py b/mcp_servers/notion/tools/__init__.py new file mode 100644 index 00000000..65e9e780 --- /dev/null +++ b/mcp_servers/notion/tools/__init__.py @@ -0,0 +1,31 @@ +from .base import auth_token_context +from .pages import create_page, get_page, update_page_properties, retrieve_page_property +from .databases import query_database, get_database, create_database, update_database, create_database_item +from .search import search_notion +from .users import get_user, list_users, get_me +from .comments import create_comment, get_comments +from .blocks import retrieve_block, update_block, delete_block, get_block_children, append_block_children + +__all__ = [ + 'auth_token_context', + 'create_page', + 'get_page', + 'update_page_properties', + 'retrieve_page_property', + 'query_database', + 'get_database', + 'create_database', + 'update_database', + 'create_database_item', + 'search_notion', + 'get_user', + 'list_users', + 'get_me', + 'create_comment', + 'get_comments', + 'retrieve_block', + 'update_block', + 'delete_block', + 'get_block_children', + 'append_block_children' +] \ No newline at end of file diff --git a/mcp_servers/notion/tools/base.py b/mcp_servers/notion/tools/base.py new file mode 100644 index 00000000..c72b066a --- /dev/null +++ b/mcp_servers/notion/tools/base.py @@ -0,0 +1,85 @@ +import os +from typing import Optional +from contextvars import ContextVar +from notion_client import Client + +# Context variable to store auth token for the current request +auth_token_context: ContextVar[str] = ContextVar('auth_token', default="") + + +def get_notion_client() -> Client: + """Get Notion client with authentication token from context or environment.""" + # Try to get token from context first (for HTTP requests) + try: + token = auth_token_context.get() + if token: + return Client(auth=token) + except LookupError: + pass + + # Fall back to environment variable + token = os.getenv("NOTION_API_KEY") + if not token: + raise ValueError("Notion API key not found. Please set NOTION_API_KEY environment variable or provide x-auth-token header.") + + return Client(auth=token) + + +def handle_notion_error(error: Exception) -> dict: + """Handle Notion API errors and return formatted error response.""" + error_msg = str(error) + + if "Unauthorized" in error_msg: + return { + "error": "Authentication failed. Please check your Notion API key.", + "type": "authentication_error" + } + elif "Not found" in error_msg: + return { + "error": "The requested resource was not found. Check the ID and permissions.", + "type": "not_found_error" + } + elif "Forbidden" in error_msg: + return { + "error": "Access denied. The integration may not have permission to access this resource.", + "type": "permission_error" + } + else: + return { + "error": f"Notion API error: {error_msg}", + "type": "api_error" + } + + +def validate_uuid(uuid_string: str) -> bool: + """Validate if a string is a valid UUID format.""" + import re + uuid_pattern = re.compile( + r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$', + re.IGNORECASE + ) + return bool(uuid_pattern.match(uuid_string)) + + +def format_notion_id(notion_id: str) -> str: + """Format Notion ID by removing dashes if present.""" + return notion_id.replace('-', '') + + +def clean_notion_response(response: dict) -> dict: + """Clean up Notion API response by removing unnecessary fields.""" + if isinstance(response, dict): + # Remove common unnecessary fields + cleaned = {k: v for k, v in response.items() + if k not in ['request_id', 'developer_survey']} + + # Recursively clean nested dictionaries + for key, value in cleaned.items(): + if isinstance(value, dict): + cleaned[key] = clean_notion_response(value) + elif isinstance(value, list): + cleaned[key] = [clean_notion_response(item) if isinstance(item, dict) else item + for item in value] + + return cleaned + return response \ No newline at end of file diff --git a/mcp_servers/notion/tools/blocks.py b/mcp_servers/notion/tools/blocks.py new file mode 100644 index 00000000..623f479e --- /dev/null +++ b/mcp_servers/notion/tools/blocks.py @@ -0,0 +1,89 @@ +from typing import Dict, Any, Optional, List +from .base import get_notion_client, handle_notion_error, clean_notion_response + + +async def retrieve_block(block_id: str) -> Dict[str, Any]: + """Retrieve a block from Notion.""" + try: + notion = get_notion_client() + response = notion.blocks.retrieve(block_id) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def update_block( + block_id: str, + block_type: Optional[Dict[str, Any]] = None, + archived: Optional[bool] = None +) -> Dict[str, Any]: + """Update a block in Notion.""" + try: + notion = get_notion_client() + + update_data = {} + if block_type is not None: + # The block_type should contain the specific block type and its properties + update_data.update(block_type) + if archived is not None: + update_data["archived"] = archived + + response = notion.blocks.update(block_id, **update_data) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def delete_block(block_id: str) -> Dict[str, Any]: + """Delete a block from Notion.""" + try: + notion = get_notion_client() + response = notion.blocks.delete(block_id) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def get_block_children( + block_id: str, + start_cursor: Optional[str] = None, + page_size: Optional[int] = None +) -> Dict[str, Any]: + """Retrieve children of a block.""" + try: + notion = get_notion_client() + + params = {} + if start_cursor: + params["start_cursor"] = start_cursor + if page_size: + params["page_size"] = page_size + + response = notion.blocks.children.list(block_id, **params) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def append_block_children( + block_id: str, + children: List[Dict[str, Any]], + after: Optional[str] = None +) -> Dict[str, Any]: + """Append block children to a container block.""" + try: + notion = get_notion_client() + + append_data = {"children": children} + if after: + append_data["after"] = after + + response = notion.blocks.children.append(block_id, **append_data) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) \ No newline at end of file diff --git a/mcp_servers/notion/tools/comments.py b/mcp_servers/notion/tools/comments.py new file mode 100644 index 00000000..d24febe0 --- /dev/null +++ b/mcp_servers/notion/tools/comments.py @@ -0,0 +1,49 @@ +from typing import Dict, Any, Optional, List +from .base import get_notion_client, handle_notion_error, clean_notion_response + + +async def create_comment( + parent: Dict[str, Any], + rich_text: List[Dict[str, Any]], + discussion_id: Optional[str] = None +) -> Dict[str, Any]: + """Create a comment on a page or discussion.""" + try: + notion = get_notion_client() + + comment_data = { + "rich_text": rich_text + } + + if discussion_id: + comment_data["discussion_id"] = discussion_id + else: + comment_data["parent"] = parent + + response = notion.comments.create(**comment_data) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def get_comments( + block_id: str, + start_cursor: Optional[str] = None, + page_size: Optional[int] = None +) -> Dict[str, Any]: + """Retrieve comments from a page or block.""" + try: + notion = get_notion_client() + + params = {"block_id": block_id} + if start_cursor: + params["start_cursor"] = start_cursor + if page_size: + params["page_size"] = page_size + + response = notion.comments.list(**params) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) \ No newline at end of file diff --git a/mcp_servers/notion/tools/databases.py b/mcp_servers/notion/tools/databases.py new file mode 100644 index 00000000..058f70b4 --- /dev/null +++ b/mcp_servers/notion/tools/databases.py @@ -0,0 +1,146 @@ +from typing import Dict, Any, Optional, List +from .base import get_notion_client, handle_notion_error, clean_notion_response + + +async def query_database( + database_id: str, + filter_conditions: Optional[Dict[str, Any]] = None, + sorts: Optional[List[Dict[str, Any]]] = None, + start_cursor: Optional[str] = None, + page_size: Optional[int] = None, + filter_properties: Optional[List[str]] = None, + archived: Optional[bool] = None, + in_trash: Optional[bool] = None +) -> Dict[str, Any]: + """Query a database in Notion.""" + try: + notion = get_notion_client() + + query_params = {} + if filter_conditions: + query_params["filter"] = filter_conditions + if sorts: + query_params["sorts"] = sorts + if start_cursor: + query_params["start_cursor"] = start_cursor + if page_size: + query_params["page_size"] = page_size + if filter_properties: + query_params["filter_properties"] = filter_properties + if archived is not None: + query_params["archived"] = archived + if in_trash is not None: + query_params["in_trash"] = in_trash + + response = notion.databases.query(database_id, **query_params) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def get_database(database_id: str) -> Dict[str, Any]: + """Retrieve a database from Notion.""" + try: + notion = get_notion_client() + response = notion.databases.retrieve(database_id) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def create_database( + parent: Dict[str, Any], + title: List[Dict[str, Any]], + properties: Dict[str, Any], + icon: Optional[Dict[str, Any]] = None, + cover: Optional[Dict[str, Any]] = None, + description: Optional[List[Dict[str, Any]]] = None +) -> Dict[str, Any]: + """Create a new database in Notion.""" + try: + notion = get_notion_client() + + database_data = { + "parent": parent, + "title": title, + "properties": properties + } + + if icon: + database_data["icon"] = icon + if cover: + database_data["cover"] = cover + if description: + database_data["description"] = description + + response = notion.databases.create(**database_data) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def update_database( + database_id: str, + title: Optional[List[Dict[str, Any]]] = None, + description: Optional[List[Dict[str, Any]]] = None, + properties: Optional[Dict[str, Any]] = None, + icon: Optional[Dict[str, Any]] = None, + cover: Optional[Dict[str, Any]] = None, + archived: Optional[bool] = None +) -> Dict[str, Any]: + """Update a database in Notion.""" + try: + notion = get_notion_client() + + update_data = {} + if title is not None: + update_data["title"] = title + if description is not None: + update_data["description"] = description + if properties is not None: + update_data["properties"] = properties + if icon is not None: + update_data["icon"] = icon + if cover is not None: + update_data["cover"] = cover + if archived is not None: + update_data["archived"] = archived + + response = notion.databases.update(database_id, **update_data) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def create_database_item( + database_id: str, + properties: Dict[str, Any], + children: Optional[List[Dict[str, Any]]] = None, + icon: Optional[Dict[str, Any]] = None, + cover: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """Create a new item (page) in a database.""" + try: + notion = get_notion_client() + + page_data = { + "parent": {"database_id": database_id}, + "properties": properties + } + + if children: + page_data["children"] = children + if icon: + page_data["icon"] = icon + if cover: + page_data["cover"] = cover + + response = notion.pages.create(**page_data) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) \ No newline at end of file diff --git a/mcp_servers/notion/tools/pages.py b/mcp_servers/notion/tools/pages.py new file mode 100644 index 00000000..c5689fb4 --- /dev/null +++ b/mcp_servers/notion/tools/pages.py @@ -0,0 +1,257 @@ +from typing import Dict, Any, Optional, List +import re +from .base import get_notion_client, handle_notion_error, clean_notion_response + + +def markdown_to_notion_blocks(markdown_content: str) -> List[Dict[str, Any]]: + """Convert markdown content to Notion block format.""" + blocks = [] + lines = markdown_content.split('\n') + i = 0 + + while i < len(lines): + line = lines[i] + + # Skip empty lines + if not line.strip(): + i += 1 + continue + + # Headers + if line.startswith('#'): + header_match = re.match(r'^(#{1,3})\s+(.+)', line) + if header_match: + level = len(header_match.group(1)) + text = header_match.group(2) + block_type = ['heading_1', 'heading_2', 'heading_3'][level - 1] + blocks.append({ + "object": "block", + "type": block_type, + block_type: { + "rich_text": [{ + "type": "text", + "text": {"content": text} + }] + } + }) + i += 1 + continue + + # Code blocks + if line.startswith('```'): + code_lines = [] + language = line[3:].strip() or "plain text" + i += 1 + while i < len(lines) and not lines[i].startswith('```'): + code_lines.append(lines[i]) + i += 1 + blocks.append({ + "object": "block", + "type": "code", + "code": { + "rich_text": [{ + "type": "text", + "text": {"content": '\n'.join(code_lines)} + }], + "language": language + } + }) + i += 1 + continue + + # Bullet lists + if line.startswith('- ') or line.startswith('* '): + blocks.append({ + "object": "block", + "type": "bulleted_list_item", + "bulleted_list_item": { + "rich_text": [{ + "type": "text", + "text": {"content": line[2:].strip()} + }] + } + }) + i += 1 + continue + + # Numbered lists + numbered_match = re.match(r'^\d+\.\s+(.+)', line) + if numbered_match: + blocks.append({ + "object": "block", + "type": "numbered_list_item", + "numbered_list_item": { + "rich_text": [{ + "type": "text", + "text": {"content": numbered_match.group(1)} + }] + } + }) + i += 1 + continue + + # Regular paragraphs + blocks.append({ + "object": "block", + "type": "paragraph", + "paragraph": { + "rich_text": [{ + "type": "text", + "text": {"content": line} + }] + } + }) + i += 1 + + return blocks + + +async def create_page( + page: Optional[Dict[str, Any]] = None, + parent: Optional[Dict[str, Any]] = None, + properties: Optional[Dict[str, Any]] = None, + children: Optional[List[Dict[str, Any]]] = None, + icon: Optional[Dict[str, Any]] = None, + cover: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """ + Create a new page in Notion with support for both old and new input formats. + + New format: + - page: Contains content (markdown) and properties (with title) + - parent: Contains page_id, database_id, or workspace (optional - defaults to private page) + + Old format: + - properties: Direct properties dict + - parent: Parent object (optional - defaults to private page) + - children: Optional blocks + + If parent is not specified or cannot be found, a private page will be created in the workspace. + """ + try: + notion = get_notion_client() + + # Handle new format with page object + if page: + # Extract and convert content to blocks if provided + if page.get('content'): + children = markdown_to_notion_blocks(page['content']) + + # Extract properties and format title + if page.get('properties'): + page_props = page['properties'] + if 'title' in page_props: + # Convert simple title string to Notion title property format + properties = { + "title": { + "title": [{ + "type": "text", + "text": {"content": page_props['title']} + }] + } + } + # Add any other properties from page_props + for key, value in page_props.items(): + if key != 'title': + properties[key] = value + else: + properties = page_props + + # Ensure we have properties + if not properties: + raise ValueError("Properties are required") + + # If parent is not specified, create a private page (workspace-level) + if not parent: + parent = {"workspace": True} + + # Build the page data + page_data = { + "parent": parent, + "properties": properties + } + + if children: + page_data["children"] = children + if icon: + page_data["icon"] = icon + if cover: + page_data["cover"] = cover + + response = notion.pages.create(**page_data) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def get_page( + page_id: str, + filter_properties: Optional[List[str]] = None +) -> Dict[str, Any]: + """Retrieve a page from Notion.""" + try: + notion = get_notion_client() + + params = {} + if filter_properties: + params["filter_properties"] = filter_properties + + response = notion.pages.retrieve(page_id, **params) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def retrieve_page_property( + page_id: str, + property_id: str, + start_cursor: Optional[str] = None, + page_size: Optional[int] = None +) -> Dict[str, Any]: + """Retrieve a specific property from a page.""" + try: + notion = get_notion_client() + + params = {} + if start_cursor: + params["start_cursor"] = start_cursor + if page_size: + params["page_size"] = page_size + + response = notion.pages.properties.retrieve(page_id, property_id, **params) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def update_page_properties( + page_id: str, + properties: Dict[str, Any], + icon: Optional[Dict[str, Any]] = None, + cover: Optional[Dict[str, Any]] = None, + archived: Optional[bool] = None, + in_trash: Optional[bool] = None +) -> Dict[str, Any]: + """Update properties of a page.""" + try: + notion = get_notion_client() + + update_data = {"properties": properties} + + if icon is not None: + update_data["icon"] = icon + if cover is not None: + update_data["cover"] = cover + if archived is not None: + update_data["archived"] = archived + if in_trash is not None: + update_data["in_trash"] = in_trash + + response = notion.pages.update(page_id, **update_data) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) \ No newline at end of file diff --git a/mcp_servers/notion/tools/search.py b/mcp_servers/notion/tools/search.py new file mode 100644 index 00000000..af0b0c43 --- /dev/null +++ b/mcp_servers/notion/tools/search.py @@ -0,0 +1,32 @@ +from typing import Dict, Any, Optional +from .base import get_notion_client, handle_notion_error, clean_notion_response + + +async def search_notion( + query: Optional[str] = None, + sort: Optional[Dict[str, Any]] = None, + filter_conditions: Optional[Dict[str, Any]] = None, + start_cursor: Optional[str] = None, + page_size: Optional[int] = None +) -> Dict[str, Any]: + """Search for pages and databases in Notion.""" + try: + notion = get_notion_client() + + search_params = {} + if query: + search_params["query"] = query + if sort: + search_params["sort"] = sort + if filter_conditions: + search_params["filter"] = filter_conditions + if start_cursor: + search_params["start_cursor"] = start_cursor + if page_size: + search_params["page_size"] = page_size + + response = notion.search(**search_params) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) \ No newline at end of file diff --git a/mcp_servers/notion/tools/users.py b/mcp_servers/notion/tools/users.py new file mode 100644 index 00000000..fac5f9ac --- /dev/null +++ b/mcp_servers/notion/tools/users.py @@ -0,0 +1,48 @@ +from typing import Dict, Any, Optional +from .base import get_notion_client, handle_notion_error, clean_notion_response + + +async def get_user(user_id: str) -> Dict[str, Any]: + """Retrieve a user from Notion.""" + try: + notion = get_notion_client() + response = notion.users.retrieve(user_id) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def get_me() -> Dict[str, Any]: + """Retrieve your token's bot user information.""" + try: + notion = get_notion_client() + response = notion.users.me() + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + +async def list_users( + start_cursor: Optional[str] = None, + page_size: Optional[int] = None +) -> Dict[str, Any]: + """List all users in the workspace.""" + try: + notion = get_notion_client() + + params = {} + if start_cursor: + params["start_cursor"] = start_cursor + if page_size: + params["page_size"] = page_size + + response = notion.users.list(**params) + return clean_notion_response(response) + + except Exception as e: + return handle_notion_error(e) + + + \ No newline at end of file diff --git a/mcp_servers/onedrive/.env.example b/mcp_servers/onedrive/.env.example new file mode 100644 index 00000000..f5677d22 --- /dev/null +++ b/mcp_servers/onedrive/.env.example @@ -0,0 +1,2 @@ +ONEDRIVE_ACCESS_TOKEN=ONEDRIVE_ACCESS_TOKEN +ONEDRIVE_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/onedrive/.python-version b/mcp_servers/onedrive/.python-version new file mode 100644 index 00000000..24ee5b1b --- /dev/null +++ b/mcp_servers/onedrive/.python-version @@ -0,0 +1 @@ +3.13 diff --git a/mcp_servers/onedrive/Dockerfile b/mcp_servers/onedrive/Dockerfile new file mode 100644 index 00000000..a93c36f1 --- /dev/null +++ b/mcp_servers/onedrive/Dockerfile @@ -0,0 +1,22 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only requirements to leverage cache +COPY mcp_servers/onedrive/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the rest of the code +COPY mcp_servers/onedrive/server.py . +COPY mcp_servers/onedrive/tools/ ./tools/ + +# Expose whatever port your onedrive server uses (adjust if different) +EXPOSE 5000 + +# Command to run +CMD ["python", "server.py"] diff --git a/mcp_servers/onedrive/README.md b/mcp_servers/onedrive/README.md new file mode 100644 index 00000000..ec01d5e8 --- /dev/null +++ b/mcp_servers/onedrive/README.md @@ -0,0 +1,78 @@ +# OneDrive MCP Server + +A Model Context Protocol (MCP) server for Microsoft OneDrive integration. Manage files, folders, and sharing using OneDrive's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to OneDrive with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("ONEDRIVE", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/onedrive-mcp-server:latest + + +# Run OneDrive MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/onedrive-mcp-server:latest + + +# Run OneDrive MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_onedrive_access_token_here"}' \ + ghcr.io/klavis-ai/onedrive-mcp-server:latest +``` + +**OAuth Setup:** OneDrive requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **File Management**: Upload, download, and manage OneDrive files +- **Folder Operations**: Create, move, and organize folders +- **Sharing**: Create and manage shared links and permissions +- **Search**: Search files and folders by name and content +- **Collaboration**: Handle real-time collaboration and version control + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/onedrive/main.py b/mcp_servers/onedrive/main.py new file mode 100644 index 00000000..7b81c054 --- /dev/null +++ b/mcp_servers/onedrive/main.py @@ -0,0 +1,6 @@ +def main(): + print("Hello from onedrive!") + + +if __name__ == "__main__": + main() diff --git a/mcp_servers/onedrive/pyproject.toml b/mcp_servers/onedrive/pyproject.toml new file mode 100644 index 00000000..9e956e90 --- /dev/null +++ b/mcp_servers/onedrive/pyproject.toml @@ -0,0 +1,14 @@ +[project] +name = "onedrive" +version = "0.1.0" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.13" +dependencies = [ + "click>=8.3.0", + "httpx>=0.28.1", + "mcp==1.11.0", + "python-dotenv>=1.1.1", + "starlette>=0.48.0", + "uvicorn>=0.37.0", +] diff --git a/mcp_servers/onedrive/requirements.txt b/mcp_servers/onedrive/requirements.txt new file mode 100644 index 00000000..d02ba16b --- /dev/null +++ b/mcp_servers/onedrive/requirements.txt @@ -0,0 +1 @@ +mcp==1.11.0 diff --git a/mcp_servers/onedrive/server.py b/mcp_servers/onedrive/server.py new file mode 100644 index 00000000..85d19300 --- /dev/null +++ b/mcp_servers/onedrive/server.py @@ -0,0 +1,635 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict, List + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + # Base + auth_token_context, + + # Both Items (Files & Folders) + onedrive_rename_item, + onedrive_move_item, + onedrive_delete_item, + + # Files + onedrive_read_file_content, + onedrive_create_file, + + # Folders + onedrive_create_folder, + + # Search & List + onedrive_list_root_files_folders, + onedrive_list_inside_folder, + onedrive_search_item_by_name, + onedrive_search_folder_by_name, + onedrive_get_item_by_id, + + #Sharing + onedrive_list_shared_items, +) + + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +ONEDRIVE_MCP_SERVER_PORT = int(os.getenv("ONEDRIVE_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + else: + auth_data = None + + if auth_data: + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + + return "" + +@click.command() +@click.option("--port", default=ONEDRIVE_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) + +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("onedrive-mcp-server") +#------------------------------------------------------------------------------------- + + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # File Operations + types.Tool( + name="onedrive_rename_item", + description="Rename a file or folder in OneDrive by its ID.", + inputSchema={ + "type": "object", + "properties": { + "file_id": {"type": "string", "description": "ID of the file/folder to rename"}, + "new_name": {"type": "string", "description": "New name for the item"} + }, + "required": ["file_id", "new_name"] + }, + annotations=types.ToolAnnotations(**{"category": "ONEDRIVE_ITEM"}) + ), + types.Tool( + name="onedrive_move_item", + description="Move an item to a different folder in OneDrive.", + inputSchema={ + "type": "object", + "properties": { + "item_id": {"type": "string", "description": "ID of the item to move"}, + "new_parent_id": {"type": "string", "description": "ID of the destination folder"} + }, + "required": ["item_id", "new_parent_id"] + }, + annotations=types.ToolAnnotations(**{"category": "ONEDRIVE_ITEM"}) + ), + types.Tool( + name="onedrive_delete_item", + description="Delete an item from OneDrive by its ID.", + inputSchema={ + "type": "object", + "properties": { + "item_id": {"type": "string", "description": "ID of the item to delete"} + }, + "required": ["item_id"] + }, + annotations=types.ToolAnnotations(**{"category": "ONEDRIVE_ITEM"}) + ), + + # File Content Operations + types.Tool( + name="onedrive_read_file_content", + description="Read the content of a file from OneDrive by its ID.", + inputSchema={ + "type": "object", + "properties": { + "file_id": {"type": "string", "description": "ID of the file to read"} + }, + "required": ["file_id"] + }, + annotations=types.ToolAnnotations(**{"category": "ONEDRIVE_FILE"}) + ), + + # File Creation + types.Tool( + name="onedrive_create_file", + description="Create a new file in OneDrive.", + inputSchema={ + "type": "object", + "properties": { + "parent_folder": {"type": "string", "description": "'root' to create in root or ID of the parent folder"}, + "new_file_name": {"type": "string", "description": "Name for the new file"}, + "data": {"type": "string", "description": "Content for the new file (optional)"}, + "if_exists": { + "type": "string", + "enum": ["error", "rename", "replace"], + "default": "error", + "description": "Behavior when file exists: 'error' (abort), 'rename' (create unique name), 'replace' (overwrite)" + } + }, + "required": ["parent_folder", "new_file_name"] + }, + annotations=types.ToolAnnotations(**{"category": "ONEDRIVE_FILE"}) + ), + + # Folder Operations + types.Tool( + name="onedrive_create_folder", + description="Create a new folder in OneDrive.", + inputSchema={ + "type": "object", + "properties": { + "parent_folder": {"type": "string", "description": "'root' to create in root or ID of the parent folder"}, + "new_folder_name": {"type": "string", "description": "Name for the new folder"}, + "behavior": { + "type": "string", + "enum": ["fail", "replace", "rename"], + "default": "fail", + "description": "Conflict resolution: 'fail' (return error), 'replace' (overwrite), 'rename' (unique name)" + } + }, + "required": ["parent_folder", "new_folder_name"] + }, + annotations=types.ToolAnnotations(**{"category": "ONEDRIVE_FOLDER"}) + ), + + # Listing & Searching + types.Tool( + name="onedrive_list_root_files_folders", + description="List all files and folders in the root of OneDrive.", + inputSchema={ + "type": "object", + "properties": {} + }, + annotations=types.ToolAnnotations(**{"category": "ONEDRIVE_FOLDER", "readOnlyHint": True}) + ), + types.Tool( + name="onedrive_list_inside_folder", + description="List all items inside a specific folder.", + inputSchema={ + "type": "object", + "properties": { + "folder_id": {"type": "string", "description": "ID of the folder to list"} + }, + "required": ["folder_id"] + }, + annotations=types.ToolAnnotations(**{"category": "ONEDRIVE_FOLDER", "readOnlyHint": True}) + ), + types.Tool( + name="onedrive_search_item_by_name", + description="Search for items by name in OneDrive.", + inputSchema={ + "type": "object", + "properties": { + "itemname": {"type": "string", "description": "Name or partial name to search for"} + }, + "required": ["itemname"] + }, + annotations=types.ToolAnnotations(**{"category": "ONEDRIVE_ITEM", "readOnlyHint": True}) + ), + types.Tool( + name="onedrive_search_folder_by_name", + description="Search for folders by name in OneDrive.", + inputSchema={ + "type": "object", + "properties": { + "folder_name": {"type": "string", "description": "Name or partial name to search for"} + }, + "required": ["folder_name"] + }, + annotations=types.ToolAnnotations(**{"category": "ONEDRIVE_FOLDER", "readOnlyHint": True}) + ), + types.Tool( + name="onedrive_get_item_by_id", + description="Get item details by its ID.", + inputSchema={ + "type": "object", + "properties": { + "item_id": {"type": "string", "description": "ID of the item to retrieve"} + }, + "required": ["item_id"] + }, + annotations=types.ToolAnnotations(**{"category": "ONEDRIVE_ITEM", "readOnlyHint": True}) + ), + + # Sharing & Permissions + types.Tool( + name="onedrive_list_shared_items", + description="List all items shared with the current user in OneDrive.", + inputSchema={ + "type": "object", + "properties": {} + }, + annotations=types.ToolAnnotations(**{"category": "ONEDRIVE_ITEM", "readOnlyHint": True}) + ) + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + # File Operations + if name == "onedrive_rename_item": + try: + result = await onedrive_rename_item( + file_id=arguments["file_id"], + new_name=arguments["new_name"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error renaming item: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "onedrive_move_item": + try: + result = await onedrive_move_item( + item_id=arguments["item_id"], + new_parent_id=arguments["new_parent_id"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error moving item: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "onedrive_delete_item": + try: + result = await onedrive_delete_item( + item_id=arguments["item_id"] + ) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error deleting item: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # File Content Operations + elif name == "onedrive_read_file_content": + try: + result = await onedrive_read_file_content( + file_id=arguments["file_id"] + ) + return [ + types.TextContent( + type="text", + text=result if isinstance(result, str) else json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error reading file content: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # File Creation + elif name == "onedrive_create_file": + try: + result = await onedrive_create_file( + parent_folder=arguments["parent_folder"], + new_file_name=arguments["new_file_name"], + data=arguments.get("data"), + if_exists=arguments.get("if_exists", "error") + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error creating file: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Folder Operations + elif name == "onedrive_create_folder": + try: + result = await onedrive_create_folder( + parent_folder=arguments["parent_folder"], + new_folder_name=arguments["new_folder_name"], + behavior=arguments.get("behavior", "fail") + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error creating folder: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Listing & Searching + elif name == "onedrive_list_root_files_folders": + try: + result = await onedrive_list_root_files_folders() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error listing root items: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "onedrive_list_inside_folder": + try: + result = await onedrive_list_inside_folder( + folder_id=arguments["folder_id"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error listing folder contents: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "onedrive_search_item_by_name": + try: + result = await onedrive_search_item_by_name( + itemname=arguments["itemname"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error searching items: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "onedrive_search_folder_by_name": + try: + result = await onedrive_search_folder_by_name( + folder_name=arguments["folder_name"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error searching folders: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "onedrive_get_item_by_id": + try: + result = await onedrive_get_item_by_id( + item_id=arguments["item_id"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error getting item by ID: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Sharing & Permissions + elif name == "onedrive_list_shared_items": + try: + result = await onedrive_list_shared_items() + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error listing shared items: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + else: + return [ + types.TextContent( + type="text", + text=f"Unknown OneDrive tool: {name}", + ) + ] + + + +#--------------------------------------------------------------------------------------------- + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract access token from headers + access_token = extract_access_token(request) + + # Set the access token in context for this request + token = auth_token_context.set(access_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract access token from headers + access_token = extract_access_token(scope) + + # Set the access token in context for this request + token = auth_token_context.set(access_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + + +if __name__ == "__main__": + main() diff --git a/mcp_servers/onedrive/tools/__init__.py b/mcp_servers/onedrive/tools/__init__.py new file mode 100644 index 00000000..0df90495 --- /dev/null +++ b/mcp_servers/onedrive/tools/__init__.py @@ -0,0 +1,61 @@ +from .base import ( + auth_token_context +) + +from .combined_files_and_folder import ( + onedrive_rename_item, + onedrive_move_item, + onedrive_delete_item +) + +from .files import ( + onedrive_read_file_content, + onedrive_create_file, + #onedrive_create_file_in_root +) + +from .folders import ( + onedrive_create_folder, + #onedrive_create_folder_in_root +) + +from .onedrive_explore import ( + onedrive_list_root_files_folders, + onedrive_list_inside_folder, + onedrive_search_item_by_name, + onedrive_search_folder_by_name, + onedrive_get_item_by_id +) + +from .sharing import ( + onedrive_list_shared_items +) + +__all__ = [ + # Base + "auth_token_context", + + # Both Items (Files & Folders) + "onedrive_rename_item", + "onedrive_move_item", + "onedrive_delete_item", + + # Files + "onedrive_read_file_content", + "onedrive_create_file", + #"onedrive_create_file_in_root", + + # Folders + "onedrive_create_folder", + #"onedrive_create_folder_in_root", + + # Search & List + "onedrive_list_root_files_folders", + "onedrive_list_inside_folder", + "onedrive_search_item_by_name", + "onedrive_search_folder_by_name", + "onedrive_get_item_by_id", + + # Sharing + "onedrive_list_shared_items" +] \ No newline at end of file diff --git a/mcp_servers/onedrive/tools/base.py b/mcp_servers/onedrive/tools/base.py new file mode 100644 index 00000000..3ebcd1f2 --- /dev/null +++ b/mcp_servers/onedrive/tools/base.py @@ -0,0 +1,47 @@ +import logging +import os +from contextvars import ContextVar +from typing import Optional +from dotenv import load_dotenv + +# Load environment variables from .env file +load_dotenv() + +logger = logging.getLogger(__name__) + +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + try: + token = auth_token_context.get() + if not token: + token = os.getenv("ONEDRIVE_ACCESS_TOKEN") + if not token: + raise RuntimeError("No authentication token available") + return token + except LookupError: + token = os.getenv("ONEDRIVE_ACCESS_TOKEN") + if not token: + raise RuntimeError("Authentication token not found in context or environment") + return token + +def get_onedrive_client() -> Optional[dict]: + """ + Return a simple client dict with base_url and headers. + """ + try: + auth_token = get_auth_token() + client = { + "base_url": "https://graph.microsoft.com/v1.0", + "headers": {'Authorization': f'Bearer {auth_token}'} + } + return client + except RuntimeError as e: + logger.warning(f"Failed to get auth token: {e}") + return None + except Exception as e: + logger.error(f"Failed to initialize OneDrive client: {e}") + return None + +if __name__ == "__main__": + print(get_onedrive_client()) diff --git a/mcp_servers/onedrive/tools/combined_files_and_folder.py b/mcp_servers/onedrive/tools/combined_files_and_folder.py new file mode 100644 index 00000000..2d3ef368 --- /dev/null +++ b/mcp_servers/onedrive/tools/combined_files_and_folder.py @@ -0,0 +1,120 @@ +import httpx +import logging +from typing import Tuple, Union +from .base import get_onedrive_client + +# Configure logging +logger = logging.getLogger(__name__) + + +async def onedrive_rename_item(file_id: str, new_name: str) -> Union[Tuple[str, dict], Tuple[str, int, str]]: + """ + Rename an item in OneDrive. + + Parameters: + - file_id: ID of the file/item to rename + - new_name: New name for the item + + Returns: + - Tuple with success message and response data OR error message and details + """ + client = get_onedrive_client() + if not client: + logger.error("Could not get OneDrive client") + return "Could not get OneDrive client" + + url = f"{client['base_url']}/me/drive/items/{file_id}" + data = {"name": new_name} + + try: + logger.info(f"Renaming item {file_id} to {new_name}") + + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.patch( + url, + headers={ **client['headers'], "Content-Type": "application/json"}, + json=data + ) + return "Renamed successfully:", response.json() + except Exception as e: + logger.error(f"Exception occurred while renaming item: {e}") + return "Error:", str(e) + + +async def onedrive_move_item(item_id: str, new_parent_id: str) -> Union[Tuple[str, dict], Tuple[str, int, str]]: + """ + Move an item to a different folder in OneDrive. + + Parameters: + - item_id: ID of the item to move + - new_parent_id: ID of the destination folder + + Returns: + - Tuple with success message and response data OR error message and details + """ + client = get_onedrive_client() + if not client: + logger.error("Could not get OneDrive client") + return "Could not get OneDrive client" + + url = f"{client['base_url']}/me/drive/items/{item_id}" + body = { + "parentReference": {"id": new_parent_id} + } + + try: + logger.info(f"Moving item {item_id} to parent {new_parent_id}") + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.patch(url, headers=client['headers'], json=body) + return "Item moved:", response.json() + except Exception as e: + logger.error(f"Exception occurred while moving item: {e}") + return ("Error:", str(e)) + + +async def onedrive_delete_item(item_id: str) -> Union[Tuple[str], Tuple[str, int, str]]: + """ + Deletes an item from OneDrive. + + Args: + item_id: ID of the item to delete. + + Returns: + A tuple with a success message OR an error message with details. + """ + client = get_onedrive_client() + if not client: + logger.error("Could not get OneDrive client") + return "Could not get OneDrive client", 500, "Authentication client not found." + + url = f"{client['base_url']}/me/drive/items/{item_id}" + + try: + logger.info(f"Deleting item {item_id}") + # Although creating a client for each call is inefficient, this works + # as a self-contained function. + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.delete(url, headers=client['headers']) + + # Key Fix: Check the status code from the response. + # A successful delete operation returns 204 No Content. + if response.status_code == 204: + return "Deleted successfully" + else: + # If it's not 204, the API returned an error. + logger.error(f"Error deleting item {item_id}: {response.status_code} - {response.text}") + # Attempt to return the JSON error from the API, falling back to raw text. + try: + error_details = response.json() + except Exception: + error_details = response.text + return "Error:", response.status_code, str(error_details) + + except httpx.RequestError as exc: + # This handles network-level errors (e.g., cannot connect). + logger.error(f"Network error while deleting item {item_id}: {exc}") + return "Error:", 503, f"A network error occurred: {exc}" + except Exception as exc: + # This handles any other unexpected errors. + logger.error(f"An unexpected exception occurred while deleting item: {exc}") + return "Error:", 500, f"An unexpected error occurred: {exc}" diff --git a/mcp_servers/onedrive/tools/files.py b/mcp_servers/onedrive/tools/files.py new file mode 100644 index 00000000..8e66d073 --- /dev/null +++ b/mcp_servers/onedrive/tools/files.py @@ -0,0 +1,102 @@ +import httpx +import logging +import os +from typing import Tuple, Union, Dict, Any +from .base import get_onedrive_client +from .onedrive_explore import onedrive_list_inside_folder +import uuid + +# Configure logging +logger = logging.getLogger(__name__) + + +async def onedrive_read_file_content(file_id: str) -> Union[str, Tuple[str, int, str]]: + """ + Read the content of a file from OneDrive. + + Parameters: + - file_id: The ID of the file to read + + Returns: + - The file content as string if successful + - Tuple with error message, status code, and response text if failed + """ + client = get_onedrive_client() + if not client: + logger.error("Could not get OneDrive client") + return "Could not get OneDrive client" + + url = f"{client['base_url']}/me/drive/items/{file_id}/content" + + try: + logger.info(f"Reading content of file ID: {file_id}") + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.get(url, headers=client['headers'], follow_redirects=True) + return response.text + except Exception as e: + logger.error(f"Exception occurred while reading file content: {e}") + return "Error:", str(e) + +async def onedrive_create_file( + parent_folder: str, + new_file_name: str, + data: str = None, + if_exists: str = 'error' +) -> Union[Tuple[str, Dict], Tuple[str]]: + """ + Create a new file in a specific OneDrive folder. + + Parameters: + - parent_folder: 'root' to create in root or ID of the parent folder + - new_file_name: Name for the new file + - data: Content for the new file (optional) + - if_exists: Behavior when file exists ('error', 'rename', or 'replace') + + Returns: + - Tuple with success message and response JSON if successful + - Tuple with error message if failed + """ + client = get_onedrive_client() + if not client: + logger.error("Could not get OneDrive client") + return "Could not get OneDrive client" + + try: + logger.info(f"Creating file '{new_file_name}' in folder {parent_folder} with if_exists={if_exists}") + + # Step 1: list files/folders inside parent folder + result = await onedrive_list_inside_folder(parent_folder) + + if not result or len(result) < 2: + logger.error(f"Could not list contents of folder {parent_folder}") + return ("Could not list folder contents",) + + _, existing_items = result + + existing_names = [item['name'] for item in existing_items.get('value', [])] + + # Step 2: handle existing file + final_name = new_file_name + if new_file_name in existing_names: + if if_exists == 'error': + logger.warning(f"File '{new_file_name}' already exists in folder {parent_folder}") + return (f"File '{new_file_name}' already exists. Aborting.",) + elif if_exists == 'rename': + name, ext = os.path.splitext(new_file_name) + final_name = f"{name}_{uuid.uuid4().hex}{ext}" + logger.info(f"Renaming file to '{final_name}' due to naming conflict") + elif if_exists == 'replace': + logger.info(f"File exists, will replace '{new_file_name}'") + else: + logger.error(f"Invalid if_exists option: {if_exists}") + return ("Invalid if_exists option.",) + + # Step 3: create the file + url = f"{client['base_url']}/me/drive/items/{parent_folder}:/{final_name}:/content" + async with httpx.AsyncClient() as httpx_client: + put_response = await httpx_client.put(url, headers=client['headers'], data=data or '') + return "File created:", put_response.json() + except Exception as e: + logger.error(f"Exception occurred while creating file: {e}") + return "Error:", str(e) + diff --git a/mcp_servers/onedrive/tools/folders.py b/mcp_servers/onedrive/tools/folders.py new file mode 100644 index 00000000..5f70928d --- /dev/null +++ b/mcp_servers/onedrive/tools/folders.py @@ -0,0 +1,50 @@ +import httpx +import logging +from typing import Tuple, Union, Dict, Any +from .base import get_onedrive_client + +# Configure logging +logger = logging.getLogger(__name__) + +async def onedrive_create_folder( + parent_folder: str, + new_folder_name: str, + behavior: str = "fail" +) -> Union[Tuple[str, Dict[str, Any]], Tuple[str, int, str]]: + """ + Create a new folder in a specific OneDrive parent folder. + + Parameters: + - parent_folder: 'root' to create in root or ID of the parent folder + - new_folder_name: Name for the new folder + - behavior: Conflict resolution behavior ("fail", "replace", or "rename") + Default is "fail" (return error if folder exists) + + Returns: + - On success: Tuple with success message and folder creation response JSON + - On failure: Tuple with error message and details + """ + client = get_onedrive_client() + if not client: + logger.error("Could not get OneDrive client") + return "Could not get OneDrive client" + + url = f"{client['base_url']}/me/drive/items/{parent_folder}/children" + data = { + "name": new_folder_name, + "folder": {}, + "@microsoft.graph.conflictBehavior": behavior + } + + try: + logger.info(f"Creating folder '{new_folder_name}' in parent {parent_folder} with behavior={behavior}") + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.post( + url, + headers={**client['headers'], "Content-Type": "application/json"}, + json=data + ) + return "Folder created successfully:", response.json() + except Exception as e: + logger.error(f"Exception occurred while creating folder: {e}") + return "Error:", str(e) \ No newline at end of file diff --git a/mcp_servers/onedrive/tools/onedrive_explore.py b/mcp_servers/onedrive/tools/onedrive_explore.py new file mode 100644 index 00000000..2593c8db --- /dev/null +++ b/mcp_servers/onedrive/tools/onedrive_explore.py @@ -0,0 +1,144 @@ +import httpx +import logging +from typing import Tuple, Union, Dict, List, Any +from .base import get_onedrive_client + +# Configure logging +logger = logging.getLogger(__name__) + +async def onedrive_list_root_files_folders() -> Union[Tuple[str, Dict[str, Any]], Tuple[str, int, str]]: + """ + List all files and folders in the root of OneDrive. + + Returns: + - On success: Tuple with status message and dictionary containing items + - On failure: Tuple with error message and details + """ + client = get_onedrive_client() + if not client: + logger.error("Could not get OneDrive client") + return "Could not get OneDrive client" + + url = f"{client['base_url']}/me/drive/root/children" + + try: + logger.info("Listing files and folders in root directory") + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.get(url, headers=client['headers']) + return "Files:", response.json() + except Exception as e: + logger.error(f"Exception while listing root items: {e}") + return "Error:", str(e) + +async def onedrive_list_inside_folder(folder_id: str) -> Union[Tuple[str, Dict[str, Any]], Tuple[str, int, str]]: + """ + List all items inside a specific folder. + + Parameters: + - folder_id: The ID of the folder to list contents from + + Returns: + - On success: Tuple with status message and dictionary containing items + - On failure: Tuple with error message and details + """ + client = get_onedrive_client() + if not client: + logger.error("Could not get OneDrive client") + return "Could not get OneDrive client" + + url = f"{client['base_url']}/me/drive/items/{folder_id}/children" + + try: + logger.info(f"Listing items inside folder ID: {folder_id}") + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.get(url, headers=client['headers']) + return "Items inside folder:", response.json() + except Exception as e: + logger.error(f"Exception while listing folder items: {e}") + return "Error:", str(e) + +async def onedrive_search_item_by_name(itemname: str) -> Union[Tuple[str, Dict[str, Any]], Tuple[str, int, str]]: + """ + Search for items by name in OneDrive. + + Parameters: + - itemname: The name or partial name of the item to search for + + Returns: + - On success: Tuple with status message and dictionary containing search results + - On failure: Tuple with error message and details + """ + client = get_onedrive_client() + if not client: + logger.error("Could not get OneDrive client") + return "Could not get OneDrive client" + + url = f"{client['base_url']}/me/drive/root/search(q='{itemname}')" + + try: + logger.info(f"Searching for items with name: {itemname}") + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.get(url, headers=client['headers']) + return "Found items:", response.json() + except Exception as e: + logger.error(f"Exception while searching items: {e}") + return "Error:", str(e) + +async def onedrive_search_folder_by_name(folder_name: str) -> Union[Tuple[str, List[Dict[str, Any]]], Tuple[str, int, str]]: + """ + Search for folders by name in OneDrive. + + Parameters: + - folder_name: The name or partial name of the folder to search for + + Returns: + - On success: Tuple with status message and list of matching folders + - On failure: Tuple with error message and details + """ + client = get_onedrive_client() + if not client: + logger.error("Could not get OneDrive client") + return "Could not get OneDrive client" + + url = f"{client['base_url']}/me/drive/root/search(q='{folder_name}')" + + try: + logger.info(f"Searching for folders with name: {folder_name}") + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.get(url, headers=client['headers']) + data = response.json() + folders = [item for item in data.get('value', []) if 'folder' in item] + return "Found folders:", folders + + except Exception as e: + logger.error(f"Exception while searching folders: {e}") + return "Error:", str(e) + +async def onedrive_get_item_by_id(item_id: str) -> Union[Dict[str, Any], Tuple[str, int, str]]: + """ + Get item details by its ID. + + Parameters: + - item_id: The ID of the item to retrieve + + Returns: + - On success: Dictionary containing item details + - On failure: Tuple with error message and details + """ + client = get_onedrive_client() + if not client: + logger.error("Could not get OneDrive client") + return "Could not get OneDrive client" + + url = f"{client['base_url']}/me/drive/items/{item_id}" + + try: + logger.info(f"Getting item with ID: {item_id}") + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.get(url, headers=client['headers']) + data = response.json() + logger.info(f"Successfully retrieved item: {data.get('name', 'unknown')}") + return data + except Exception as e: + logger.error(f"Exception while getting item: {e}") + return "Error:", str(e) \ No newline at end of file diff --git a/mcp_servers/onedrive/tools/sharing.py b/mcp_servers/onedrive/tools/sharing.py new file mode 100644 index 00000000..8d41fc03 --- /dev/null +++ b/mcp_servers/onedrive/tools/sharing.py @@ -0,0 +1,31 @@ +import httpx +import logging +from typing import Tuple, Union, Dict, Any, Literal +from .base import get_onedrive_client + +# Configure logging +logger = logging.getLogger(__name__) + +async def onedrive_list_shared_items() -> Union[Tuple[str, Dict[str, Any]], Tuple[str, int, str]]: + """ + List all items shared with the current user in OneDrive. + + Returns: + - On success: Tuple with status message and dictionary containing shared items + - On failure: Tuple with error message and details (status code and response text) + """ + client = get_onedrive_client() + if not client: + logger.error("Failed to initialize OneDrive client") + return "Could not get OneDrive client" + + url = f"{client['base_url']}/me/drive/sharedWithMe" + + try: + logger.info("Requesting list of shared items") + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.get(url, headers=client['headers']) + return "Items shared with me:", response.json() + except Exception as e: + logger.error(f"Exception while fetching shared items: {str(e)}") + return ("Error:", str(e)) diff --git a/mcp_servers/onedrive/uv.lock b/mcp_servers/onedrive/uv.lock new file mode 100644 index 00000000..b7a82900 --- /dev/null +++ b/mcp_servers/onedrive/uv.lock @@ -0,0 +1,425 @@ +version = 1 +revision = 3 +requires-python = ">=3.13" + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "sniffio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "click" +version = "8.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + +[[package]] +name = "mcp" +version = "1.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/f5/9506eb5578d5bbe9819ee8ba3198d0ad0e2fbe3bab8b257e4131ceb7dfb6/mcp-1.11.0.tar.gz", hash = "sha256:49a213df56bb9472ff83b3132a4825f5c8f5b120a90246f08b0dac6bedac44c8", size = 406907, upload-time = "2025-07-10T16:41:09.388Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/9c/c9ca79f9c512e4113a5d07043013110bb3369fc7770040c61378c7fbcf70/mcp-1.11.0-py3-none-any.whl", hash = "sha256:58deac37f7483e4b338524b98bc949b7c2b7c33d978f5fafab5bde041c5e2595", size = 155880, upload-time = "2025-07-10T16:41:07.935Z" }, +] + +[[package]] +name = "onedrive" +version = "0.1.0" +source = { virtual = "." } +dependencies = [ + { name = "click" }, + { name = "httpx" }, + { name = "mcp" }, + { name = "python-dotenv" }, + { name = "starlette" }, + { name = "uvicorn" }, +] + +[package.metadata] +requires-dist = [ + { name = "click", specifier = ">=8.3.0" }, + { name = "httpx", specifier = ">=0.28.1" }, + { name = "mcp", specifier = "==1.11.0" }, + { name = "python-dotenv", specifier = ">=1.1.1" }, + { name = "starlette", specifier = ">=0.48.0" }, + { name = "uvicorn", specifier = ">=0.37.0" }, +] + +[[package]] +name = "pydantic" +version = "2.11.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.27.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479, upload-time = "2025-08-27T12:16:36.024Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/77/610aeee8d41e39080c7e14afa5387138e3c9fa9756ab893d09d99e7d8e98/rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b", size = 361741, upload-time = "2025-08-27T12:13:31.039Z" }, + { url = "https://files.pythonhosted.org/packages/3a/fc/c43765f201c6a1c60be2043cbdb664013def52460a4c7adace89d6682bf4/rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf", size = 345574, upload-time = "2025-08-27T12:13:32.902Z" }, + { url = "https://files.pythonhosted.org/packages/20/42/ee2b2ca114294cd9847d0ef9c26d2b0851b2e7e00bf14cc4c0b581df0fc3/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83", size = 385051, upload-time = "2025-08-27T12:13:34.228Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e8/1e430fe311e4799e02e2d1af7c765f024e95e17d651612425b226705f910/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf", size = 398395, upload-time = "2025-08-27T12:13:36.132Z" }, + { url = "https://files.pythonhosted.org/packages/82/95/9dc227d441ff2670651c27a739acb2535ccaf8b351a88d78c088965e5996/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2", size = 524334, upload-time = "2025-08-27T12:13:37.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/01/a670c232f401d9ad461d9a332aa4080cd3cb1d1df18213dbd0d2a6a7ab51/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0", size = 407691, upload-time = "2025-08-27T12:13:38.94Z" }, + { url = "https://files.pythonhosted.org/packages/03/36/0a14aebbaa26fe7fab4780c76f2239e76cc95a0090bdb25e31d95c492fcd/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418", size = 386868, upload-time = "2025-08-27T12:13:40.192Z" }, + { url = "https://files.pythonhosted.org/packages/3b/03/8c897fb8b5347ff6c1cc31239b9611c5bf79d78c984430887a353e1409a1/rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d", size = 405469, upload-time = "2025-08-27T12:13:41.496Z" }, + { url = "https://files.pythonhosted.org/packages/da/07/88c60edc2df74850d496d78a1fdcdc7b54360a7f610a4d50008309d41b94/rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274", size = 422125, upload-time = "2025-08-27T12:13:42.802Z" }, + { url = "https://files.pythonhosted.org/packages/6b/86/5f4c707603e41b05f191a749984f390dabcbc467cf833769b47bf14ba04f/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd", size = 562341, upload-time = "2025-08-27T12:13:44.472Z" }, + { url = "https://files.pythonhosted.org/packages/b2/92/3c0cb2492094e3cd9baf9e49bbb7befeceb584ea0c1a8b5939dca4da12e5/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2", size = 592511, upload-time = "2025-08-27T12:13:45.898Z" }, + { url = "https://files.pythonhosted.org/packages/10/bb/82e64fbb0047c46a168faa28d0d45a7851cd0582f850b966811d30f67ad8/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002", size = 557736, upload-time = "2025-08-27T12:13:47.408Z" }, + { url = "https://files.pythonhosted.org/packages/00/95/3c863973d409210da7fb41958172c6b7dbe7fc34e04d3cc1f10bb85e979f/rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3", size = 221462, upload-time = "2025-08-27T12:13:48.742Z" }, + { url = "https://files.pythonhosted.org/packages/ce/2c/5867b14a81dc217b56d95a9f2a40fdbc56a1ab0181b80132beeecbd4b2d6/rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83", size = 232034, upload-time = "2025-08-27T12:13:50.11Z" }, + { url = "https://files.pythonhosted.org/packages/c7/78/3958f3f018c01923823f1e47f1cc338e398814b92d83cd278364446fac66/rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d", size = 222392, upload-time = "2025-08-27T12:13:52.587Z" }, + { url = "https://files.pythonhosted.org/packages/01/76/1cdf1f91aed5c3a7bf2eba1f1c4e4d6f57832d73003919a20118870ea659/rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228", size = 358355, upload-time = "2025-08-27T12:13:54.012Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6f/bf142541229374287604caf3bb2a4ae17f0a580798fd72d3b009b532db4e/rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92", size = 342138, upload-time = "2025-08-27T12:13:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/1a/77/355b1c041d6be40886c44ff5e798b4e2769e497b790f0f7fd1e78d17e9a8/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2", size = 380247, upload-time = "2025-08-27T12:13:57.683Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a4/d9cef5c3946ea271ce2243c51481971cd6e34f21925af2783dd17b26e815/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723", size = 390699, upload-time = "2025-08-27T12:13:59.137Z" }, + { url = "https://files.pythonhosted.org/packages/3a/06/005106a7b8c6c1a7e91b73169e49870f4af5256119d34a361ae5240a0c1d/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802", size = 521852, upload-time = "2025-08-27T12:14:00.583Z" }, + { url = "https://files.pythonhosted.org/packages/e5/3e/50fb1dac0948e17a02eb05c24510a8fe12d5ce8561c6b7b7d1339ab7ab9c/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f", size = 402582, upload-time = "2025-08-27T12:14:02.034Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b0/f4e224090dc5b0ec15f31a02d746ab24101dd430847c4d99123798661bfc/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2", size = 384126, upload-time = "2025-08-27T12:14:03.437Z" }, + { url = "https://files.pythonhosted.org/packages/54/77/ac339d5f82b6afff1df8f0fe0d2145cc827992cb5f8eeb90fc9f31ef7a63/rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21", size = 399486, upload-time = "2025-08-27T12:14:05.443Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/3e1c255eee6ac358c056a57d6d6869baa00a62fa32eea5ee0632039c50a3/rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef", size = 414832, upload-time = "2025-08-27T12:14:06.902Z" }, + { url = "https://files.pythonhosted.org/packages/3f/db/6d498b844342deb3fa1d030598db93937a9964fcf5cb4da4feb5f17be34b/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081", size = 557249, upload-time = "2025-08-27T12:14:08.37Z" }, + { url = "https://files.pythonhosted.org/packages/60/f3/690dd38e2310b6f68858a331399b4d6dbb9132c3e8ef8b4333b96caf403d/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd", size = 587356, upload-time = "2025-08-27T12:14:10.034Z" }, + { url = "https://files.pythonhosted.org/packages/86/e3/84507781cccd0145f35b1dc32c72675200c5ce8d5b30f813e49424ef68fc/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7", size = 555300, upload-time = "2025-08-27T12:14:11.783Z" }, + { url = "https://files.pythonhosted.org/packages/e5/ee/375469849e6b429b3516206b4580a79e9ef3eb12920ddbd4492b56eaacbe/rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688", size = 216714, upload-time = "2025-08-27T12:14:13.629Z" }, + { url = "https://files.pythonhosted.org/packages/21/87/3fc94e47c9bd0742660e84706c311a860dcae4374cf4a03c477e23ce605a/rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797", size = 228943, upload-time = "2025-08-27T12:14:14.937Z" }, + { url = "https://files.pythonhosted.org/packages/70/36/b6e6066520a07cf029d385de869729a895917b411e777ab1cde878100a1d/rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334", size = 362472, upload-time = "2025-08-27T12:14:16.333Z" }, + { url = "https://files.pythonhosted.org/packages/af/07/b4646032e0dcec0df9c73a3bd52f63bc6c5f9cda992f06bd0e73fe3fbebd/rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33", size = 345676, upload-time = "2025-08-27T12:14:17.764Z" }, + { url = "https://files.pythonhosted.org/packages/b0/16/2f1003ee5d0af4bcb13c0cf894957984c32a6751ed7206db2aee7379a55e/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a", size = 385313, upload-time = "2025-08-27T12:14:19.829Z" }, + { url = "https://files.pythonhosted.org/packages/05/cd/7eb6dd7b232e7f2654d03fa07f1414d7dfc980e82ba71e40a7c46fd95484/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b", size = 399080, upload-time = "2025-08-27T12:14:21.531Z" }, + { url = "https://files.pythonhosted.org/packages/20/51/5829afd5000ec1cb60f304711f02572d619040aa3ec033d8226817d1e571/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7", size = 523868, upload-time = "2025-08-27T12:14:23.485Z" }, + { url = "https://files.pythonhosted.org/packages/05/2c/30eebca20d5db95720ab4d2faec1b5e4c1025c473f703738c371241476a2/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136", size = 408750, upload-time = "2025-08-27T12:14:24.924Z" }, + { url = "https://files.pythonhosted.org/packages/90/1a/cdb5083f043597c4d4276eae4e4c70c55ab5accec078da8611f24575a367/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff", size = 387688, upload-time = "2025-08-27T12:14:27.537Z" }, + { url = "https://files.pythonhosted.org/packages/7c/92/cf786a15320e173f945d205ab31585cc43969743bb1a48b6888f7a2b0a2d/rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9", size = 407225, upload-time = "2025-08-27T12:14:28.981Z" }, + { url = "https://files.pythonhosted.org/packages/33/5c/85ee16df5b65063ef26017bef33096557a4c83fbe56218ac7cd8c235f16d/rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60", size = 423361, upload-time = "2025-08-27T12:14:30.469Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8e/1c2741307fcabd1a334ecf008e92c4f47bb6f848712cf15c923becfe82bb/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e", size = 562493, upload-time = "2025-08-27T12:14:31.987Z" }, + { url = "https://files.pythonhosted.org/packages/04/03/5159321baae9b2222442a70c1f988cbbd66b9be0675dd3936461269be360/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212", size = 592623, upload-time = "2025-08-27T12:14:33.543Z" }, + { url = "https://files.pythonhosted.org/packages/ff/39/c09fd1ad28b85bc1d4554a8710233c9f4cefd03d7717a1b8fbfd171d1167/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675", size = 558800, upload-time = "2025-08-27T12:14:35.436Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d6/99228e6bbcf4baa764b18258f519a9035131d91b538d4e0e294313462a98/rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3", size = 221943, upload-time = "2025-08-27T12:14:36.898Z" }, + { url = "https://files.pythonhosted.org/packages/be/07/c802bc6b8e95be83b79bdf23d1aa61d68324cb1006e245d6c58e959e314d/rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash = "sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456", size = 233739, upload-time = "2025-08-27T12:14:38.386Z" }, + { url = "https://files.pythonhosted.org/packages/c8/89/3e1b1c16d4c2d547c5717377a8df99aee8099ff050f87c45cb4d5fa70891/rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3", size = 223120, upload-time = "2025-08-27T12:14:39.82Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/dc7931dc2fa4a6e46b2a4fa744a9fe5c548efd70e0ba74f40b39fa4a8c10/rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2", size = 358944, upload-time = "2025-08-27T12:14:41.199Z" }, + { url = "https://files.pythonhosted.org/packages/e6/22/4af76ac4e9f336bfb1a5f240d18a33c6b2fcaadb7472ac7680576512b49a/rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4", size = 342283, upload-time = "2025-08-27T12:14:42.699Z" }, + { url = "https://files.pythonhosted.org/packages/1c/15/2a7c619b3c2272ea9feb9ade67a45c40b3eeb500d503ad4c28c395dc51b4/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e", size = 380320, upload-time = "2025-08-27T12:14:44.157Z" }, + { url = "https://files.pythonhosted.org/packages/a2/7d/4c6d243ba4a3057e994bb5bedd01b5c963c12fe38dde707a52acdb3849e7/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817", size = 391760, upload-time = "2025-08-27T12:14:45.845Z" }, + { url = "https://files.pythonhosted.org/packages/b4/71/b19401a909b83bcd67f90221330bc1ef11bc486fe4e04c24388d28a618ae/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec", size = 522476, upload-time = "2025-08-27T12:14:47.364Z" }, + { url = "https://files.pythonhosted.org/packages/e4/44/1a3b9715c0455d2e2f0f6df5ee6d6f5afdc423d0773a8a682ed2b43c566c/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a", size = 403418, upload-time = "2025-08-27T12:14:49.991Z" }, + { url = "https://files.pythonhosted.org/packages/1c/4b/fb6c4f14984eb56673bc868a66536f53417ddb13ed44b391998100a06a96/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8", size = 384771, upload-time = "2025-08-27T12:14:52.159Z" }, + { url = "https://files.pythonhosted.org/packages/c0/56/d5265d2d28b7420d7b4d4d85cad8ef891760f5135102e60d5c970b976e41/rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48", size = 400022, upload-time = "2025-08-27T12:14:53.859Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e9/9f5fc70164a569bdd6ed9046486c3568d6926e3a49bdefeeccfb18655875/rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb", size = 416787, upload-time = "2025-08-27T12:14:55.673Z" }, + { url = "https://files.pythonhosted.org/packages/d4/64/56dd03430ba491db943a81dcdef115a985aac5f44f565cd39a00c766d45c/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734", size = 557538, upload-time = "2025-08-27T12:14:57.245Z" }, + { url = "https://files.pythonhosted.org/packages/3f/36/92cc885a3129993b1d963a2a42ecf64e6a8e129d2c7cc980dbeba84e55fb/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb", size = 588512, upload-time = "2025-08-27T12:14:58.728Z" }, + { url = "https://files.pythonhosted.org/packages/dd/10/6b283707780a81919f71625351182b4f98932ac89a09023cb61865136244/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0", size = 555813, upload-time = "2025-08-27T12:15:00.334Z" }, + { url = "https://files.pythonhosted.org/packages/04/2e/30b5ea18c01379da6272a92825dd7e53dc9d15c88a19e97932d35d430ef7/rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a", size = 217385, upload-time = "2025-08-27T12:15:01.937Z" }, + { url = "https://files.pythonhosted.org/packages/32/7d/97119da51cb1dd3f2f3c0805f155a3aa4a95fa44fe7d78ae15e69edf4f34/rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772", size = 230097, upload-time = "2025-08-27T12:15:03.961Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "sse-starlette" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = "2025-07-27T09:07:44.565Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" }, +] + +[[package]] +name = "starlette" +version = "0.48.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a7/a5/d6f429d43394057b67a6b5bbe6eae2f77a6bf7459d961fdb224bf206eee6/starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46", size = 2652949, upload-time = "2025-09-13T08:41:05.699Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/72/2db2f49247d0a18b4f1bb9a5a39a0162869acf235f3a96418363947b3d46/starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659", size = 73736, upload-time = "2025-09-13T08:41:03.869Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, +] diff --git a/mcp_servers/openrouter/.env.example b/mcp_servers/openrouter/.env.example new file mode 100644 index 00000000..cd654030 --- /dev/null +++ b/mcp_servers/openrouter/.env.example @@ -0,0 +1,2 @@ +OPENROUTER_API_KEY="api-key" +OPENROUTER_MCP_SERVER_PORT="8000" \ No newline at end of file diff --git a/mcp_servers/openrouter/Dockerfile b/mcp_servers/openrouter/Dockerfile new file mode 100644 index 00000000..c6be0d28 --- /dev/null +++ b/mcp_servers/openrouter/Dockerfile @@ -0,0 +1,32 @@ +FROM python:3.12-slim + +# Set working directory +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements first for better caching +COPY mcp_servers/openrouter/requirements.txt . + +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application code +COPY mcp_servers/openrouter/ . + +# Create non-root user +RUN useradd --create-home --shell /bin/bash app && chown -R app:app /app +USER app + +# Expose port +EXPOSE 5000 + +# Health check +HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \ + CMD python -c "import httpx; httpx.get('http://localhost:5000/mcp')" || exit 1 + +# Run the server +CMD ["python", "server.py", "--port", "5000", "--log-level", "INFO"] \ No newline at end of file diff --git a/mcp_servers/openrouter/README.md b/mcp_servers/openrouter/README.md new file mode 100644 index 00000000..71a57650 --- /dev/null +++ b/mcp_servers/openrouter/README.md @@ -0,0 +1,73 @@ +# OpenRouter MCP Server + +A Model Context Protocol (MCP) server for OpenRouter integration. Access multiple AI models through OpenRouter's unified API interface. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to OpenRouter with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("OPENROUTER", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/openrouter-mcp-server:latest + + +# Run OpenRouter MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/openrouter-mcp-server:latest +``` + +**API Key Setup:** Get your OpenRouter API key from the [OpenRouter Dashboard](https://openrouter.ai/keys). + +## šŸ› ļø Available Tools + +- **Model Access**: Access multiple AI models through unified interface +- **Chat Completions**: Generate text responses using various AI models +- **Model Comparison**: Compare outputs from different AI models +- **Usage Analytics**: Track API usage and model performance +- **Cost Management**: Monitor and manage API costs across models + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/openrouter/requirements.txt b/mcp_servers/openrouter/requirements.txt new file mode 100644 index 00000000..dab010af --- /dev/null +++ b/mcp_servers/openrouter/requirements.txt @@ -0,0 +1,8 @@ +mcp==1.11.0 +click>=8.0.0 +starlette>=0.27.0 +uvicorn>=0.24.0 +python-dotenv>=1.0.0 +httpx>=0.25.0 +pydantic>=2.0.0 +typing-extensions>=4.0.0 \ No newline at end of file diff --git a/mcp_servers/openrouter/server.py b/mcp_servers/openrouter/server.py new file mode 100644 index 00000000..e671e65f --- /dev/null +++ b/mcp_servers/openrouter/server.py @@ -0,0 +1,704 @@ +import contextlib +import logging +import os +import json +import base64 +from collections.abc import AsyncIterator +from typing import Any, Dict + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools.base import OpenRouterToolExecutionError, auth_token_context + +# Import tools +from tools import models as model_tools +from tools import chat as chat_tools +from tools import usage as usage_tools +from tools import comparison as comparison_tools + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +OPENROUTER_MCP_SERVER_PORT = int(os.getenv("OPENROUTER_MCP_SERVER_PORT", "5000")) + + +def extract_api_key(request_or_scope) -> str: + """Extract API key from headers or environment.""" + api_key = os.getenv("API_KEY") + + if not api_key: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + else: + auth_data = None + + if auth_data: + try: + # Parse the JSON auth data to extract token + auth_json = json.loads(auth_data) + api_key = auth_json.get('token') or auth_json.get('api_key') or '' + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + api_key = "" + + return api_key or "" + + +@click.command() +@click.option("--port", default=OPENROUTER_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + app = Server("openrouter-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="openrouter_list_models", + description="List available models on OpenRouter", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of models to return (1-100, default 50)", + "minimum": 1, + "maximum": 100, + }, + "next_page_token": { + "type": "string", + "description": "Token for pagination", + }, + }, + "required": [], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_MODEL", "readOnlyHint": True}), + ), + types.Tool( + name="openrouter_search_models", + description="Search for models based on various criteria", + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Search query string", + }, + "limit": { + "type": "integer", + "description": "Maximum number of models to return (1-100, default 20)", + "minimum": 1, + "maximum": 100, + }, + "category": { + "type": "string", + "description": "Filter by model category (e.g., 'chat', 'completion', 'embedding')", + }, + "provider": { + "type": "string", + "description": "Filter by provider (e.g., 'anthropic', 'openai', 'meta-llama')", + }, + }, + "required": ["query"], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_MODEL", "readOnlyHint": True}), + ), + types.Tool( + name="openrouter_get_model_pricing", + description="Get pricing information for a specific model", + inputSchema={ + "type": "object", + "properties": { + "model_id": { + "type": "string", + "description": "The ID of the model to get pricing for", + }, + }, + "required": ["model_id"], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_MODEL", "readOnlyHint": True}), + ), + + types.Tool( + name="openrouter_create_chat_completion", + description="Create a chat completion using OpenRouter", + inputSchema={ + "type": "object", + "properties": { + "model": { + "type": "string", + "description": "The model to use for completion", + }, + "messages": { + "type": "array", + "items": { + "type": "object", + "properties": { + "role": { + "type": "string", + "enum": ["system", "user", "assistant"], + "description": "The role of the message sender", + }, + "content": { + "type": "string", + "description": "The content of the message", + }, + }, + "required": ["role", "content"], + }, + "description": "List of message objects with 'role' and 'content'", + }, + "max_tokens": { + "type": "integer", + "description": "Maximum number of tokens to generate", + }, + "temperature": { + "type": "number", + "description": "Sampling temperature (0.0 to 2.0)", + "minimum": 0.0, + "maximum": 2.0, + }, + "top_p": { + "type": "number", + "description": "Nucleus sampling parameter (0.0 to 1.0)", + "minimum": 0.0, + "maximum": 1.0, + }, + "n": { + "type": "integer", + "description": "Number of completions to generate (1 to 10)", + "minimum": 1, + "maximum": 10, + }, + "stream": { + "type": "boolean", + "description": "Whether to stream the response", + "default": False, + }, + "stop": { + "type": "array", + "items": {"type": "string"}, + "description": "Stop sequences", + }, + "presence_penalty": { + "type": "number", + "description": "Presence penalty (-2.0 to 2.0)", + "minimum": -2.0, + "maximum": 2.0, + }, + "frequency_penalty": { + "type": "number", + "description": "Frequency penalty (-2.0 to 2.0)", + "minimum": -2.0, + "maximum": 2.0, + }, + "logit_bias": { + "type": "object", + "description": "Logit bias dictionary", + }, + "user": { + "type": "string", + "description": "User identifier", + }, + }, + "required": ["model", "messages"], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_CHAT_COMPLETION"}), + ), + types.Tool( + name="openrouter_create_chat_completion_stream", + description="Create a streaming chat completion using OpenRouter", + inputSchema={ + "type": "object", + "properties": { + "model": { + "type": "string", + "description": "The model to use for completion", + }, + "messages": { + "type": "array", + "items": { + "type": "object", + "properties": { + "role": { + "type": "string", + "enum": ["system", "user", "assistant"], + }, + "content": { + "type": "string", + }, + }, + "required": ["role", "content"], + }, + }, + "max_tokens": { + "type": "integer", + "description": "Maximum number of tokens to generate", + }, + "temperature": { + "type": "number", + "minimum": 0.0, + "maximum": 2.0, + }, + "top_p": { + "type": "number", + "minimum": 0.0, + "maximum": 1.0, + }, + "stop": { + "type": "array", + "items": {"type": "string"}, + }, + "presence_penalty": { + "type": "number", + "minimum": -2.0, + "maximum": 2.0, + }, + "frequency_penalty": { + "type": "number", + "minimum": -2.0, + "maximum": 2.0, + }, + "logit_bias": { + "type": "object", + }, + "user": { + "type": "string", + }, + }, + "required": ["model", "messages"], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_CHAT_COMPLETION"}), + ), + types.Tool( + name="openrouter_create_completion", + description="Create a text completion using OpenRouter (legacy completion endpoint)", + inputSchema={ + "type": "object", + "properties": { + "model": { + "type": "string", + "description": "The model to use for completion", + }, + "prompt": { + "type": "string", + "description": "The text prompt to complete", + }, + "max_tokens": { + "type": "integer", + "description": "Maximum number of tokens to generate", + }, + "temperature": { + "type": "number", + "minimum": 0.0, + "maximum": 2.0, + }, + "top_p": { + "type": "number", + "minimum": 0.0, + "maximum": 1.0, + }, + "n": { + "type": "integer", + "minimum": 1, + "maximum": 10, + }, + "stream": { + "type": "boolean", + "default": False, + }, + "stop": { + "type": "array", + "items": {"type": "string"}, + }, + "presence_penalty": { + "type": "number", + "minimum": -2.0, + "maximum": 2.0, + }, + "frequency_penalty": { + "type": "number", + "minimum": -2.0, + "maximum": 2.0, + }, + "logit_bias": { + "type": "object", + }, + "user": { + "type": "string", + }, + }, + "required": ["model", "prompt"], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_CHAT_COMPLETION"}), + ), + + types.Tool( + name="openrouter_get_usage", + description="Get usage statistics for the authenticated user", + inputSchema={ + "type": "object", + "properties": { + "start_date": { + "type": "string", + "description": "Start date in YYYY-MM-DD format (defaults to 30 days ago)", + }, + "end_date": { + "type": "string", + "description": "End date in YYYY-MM-DD format (defaults to today)", + }, + "limit": { + "type": "integer", + "description": "Maximum number of records to return (1-1000, default 100)", + "minimum": 1, + "maximum": 1000, + }, + }, + "required": [], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_USAGE", "readOnlyHint": True}), + ), + types.Tool( + name="openrouter_get_user_profile", + description="Get the current user's profile information", + inputSchema={ + "type": "object", + "properties": {}, + "required": [], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_USER", "readOnlyHint": True}), + ), + types.Tool( + name="openrouter_get_credits", + description="Get the current user's credit balance", + inputSchema={ + "type": "object", + "properties": {}, + "required": [], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_METADATA", "readOnlyHint": True}), + ), + types.Tool( + name="openrouter_get_api_key_info", + description="Get information about the current API key", + inputSchema={ + "type": "object", + "properties": {}, + "required": [], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_METADATA", "readOnlyHint": True}), + ), + types.Tool( + name="openrouter_get_cost_estimate", + description="Estimate the cost for a specific model and token usage", + inputSchema={ + "type": "object", + "properties": { + "model": { + "type": "string", + "description": "The model ID to estimate costs for", + }, + "input_tokens": { + "type": "integer", + "description": "Number of input tokens", + "minimum": 0, + }, + "output_tokens": { + "type": "integer", + "description": "Number of output tokens (optional, defaults to 0)", + "minimum": 0, + }, + }, + "required": ["model", "input_tokens"], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_USAGE", "readOnlyHint": True}), + ), + + types.Tool( + name="openrouter_compare_models", + description="Compare multiple models by running the same prompt through each", + inputSchema={ + "type": "object", + "properties": { + "models": { + "type": "array", + "items": {"type": "string"}, + "description": "List of model IDs to compare (2-5 models)", + "minItems": 2, + "maxItems": 5, + }, + "test_prompt": { + "type": "string", + "description": "The prompt to test with all models", + }, + "max_tokens": { + "type": "integer", + "description": "Maximum tokens to generate (default 100)", + }, + "temperature": { + "type": "number", + "description": "Sampling temperature (default 0.7)", + "minimum": 0.0, + "maximum": 2.0, + }, + }, + "required": ["models", "test_prompt"], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_MODEL", "readOnlyHint": True}), + ), + types.Tool( + name="openrouter_analyze_model_performance", + description="Analyze the performance of a single model across multiple test prompts", + inputSchema={ + "type": "object", + "properties": { + "model": { + "type": "string", + "description": "The model ID to analyze", + }, + "test_prompts": { + "type": "array", + "items": {"type": "string"}, + "description": "List of test prompts to use (max 10)", + "maxItems": 10, + }, + "max_tokens": { + "type": "integer", + "description": "Maximum tokens to generate (default 100)", + }, + "temperature": { + "type": "number", + "description": "Sampling temperature (default 0.7)", + "minimum": 0.0, + "maximum": 2.0, + }, + }, + "required": ["model", "test_prompts"], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_MODEL", "readOnlyHint": True}), + ), + types.Tool( + name="openrouter_get_model_recommendations", + description="Get model recommendations based on use case and constraints", + inputSchema={ + "type": "object", + "properties": { + "use_case": { + "type": "string", + "description": "Description of the intended use case", + }, + "budget_constraint": { + "type": "string", + "enum": ["low", "medium", "high", "unlimited"], + "description": "Budget constraint", + }, + "performance_priority": { + "type": "string", + "enum": ["speed", "quality", "balanced"], + "description": "Performance priority", + }, + }, + "required": ["use_case"], + }, + annotations=types.ToolAnnotations(**{"category": "OPENROUTER_MODEL", "readOnlyHint": True}), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + logger.info(f"Calling tool: {name} with arguments: {arguments}") + + try: + if name == "openrouter_list_models": + result = await model_tools.list_models(**arguments) + + elif name == "openrouter_search_models": + result = await model_tools.search_models(**arguments) + elif name == "openrouter_get_model_pricing": + result = await model_tools.get_model_pricing(**arguments) + + elif name == "openrouter_create_chat_completion": + result = await chat_tools.create_chat_completion(**arguments) + elif name == "openrouter_create_chat_completion_stream": + result = await chat_tools.create_chat_completion_stream(**arguments) + elif name == "openrouter_create_completion": + result = await chat_tools.create_completion(**arguments) + + elif name == "openrouter_get_usage": + result = await usage_tools.get_usage(**arguments) + elif name == "openrouter_get_user_profile": + result = await usage_tools.get_user_profile(**arguments) + elif name == "openrouter_get_credits": + result = await usage_tools.get_credits(**arguments) + elif name == "openrouter_get_api_key_info": + result = await usage_tools.get_api_key_info(**arguments) + elif name == "openrouter_get_cost_estimate": + result = await usage_tools.get_cost_estimate(**arguments) + + elif name == "openrouter_compare_models": + result = await comparison_tools.compare_models(**arguments) + elif name == "openrouter_analyze_model_performance": + result = await comparison_tools.analyze_model_performance(**arguments) + elif name == "openrouter_get_model_recommendations": + result = await comparison_tools.get_model_recommendations(**arguments) + else: + raise ValueError(f"Unknown tool: {name}") + + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + except OpenRouterToolExecutionError as e: + logger.error(f"Retryable error in {name}: {e}") + return [ + types.TextContent( + type="text", + text=json.dumps({ + "error": str(e), + "retry_after_ms": e.retry_after_ms, + "additional_prompt_content": e.additional_prompt_content, + "developer_message": e.developer_message, + }, indent=2) + ) + ] + except Exception as e: + logger.exception(f"Error in {name}: {e}") + return [ + types.TextContent( + type="text", + text=json.dumps({"error": str(e)}, indent=2) + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract API key from headers + api_key = extract_api_key(request) + + # Set the API key in context for this request + token = auth_token_context.set(api_key) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract API key from headers + api_key = extract_api_key(scope) + + # Set the API key in context for this request + token = auth_token_context.set(api_key) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + try: + uvicorn.run( + starlette_app, + host="0.0.0.0", + port=port, + log_level=log_level.lower(), + ) + return 0 + except Exception as e: + logger.exception(f"Failed to start server: {e}") + return 1 + + +if __name__ == "__main__": + exit(main()) \ No newline at end of file diff --git a/mcp_servers/openrouter/test_server.py b/mcp_servers/openrouter/test_server.py new file mode 100644 index 00000000..e714ae15 --- /dev/null +++ b/mcp_servers/openrouter/test_server.py @@ -0,0 +1,265 @@ +#!/usr/bin/env python3 +""" +Test script for OpenRouter MCP Server +""" + +import asyncio +import json +import os +import logging +import dotenv +from typing import Dict, Any + +dotenv.load_dotenv() + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +TEST_API_KEY = os.getenv("OPENROUTER_API_KEY") +BASE_URL = "http://localhost:8000/" + + +async def test_list_models() -> Dict[str, Any]: + """Test the list_models tool.""" + logger.info("Testing list_models...") + + from tools.models import list_models + from tools.base import auth_token_context + + token = auth_token_context.set(TEST_API_KEY) + try: + result = await list_models(limit=5) + logger.info(f"List models result: {json.dumps(result, indent=2)}") + return result + finally: + auth_token_context.reset(token) + +async def test_chat_completion() -> Dict[str, Any]: + """Test the create_chat_completion tool.""" + logger.info("Testing create_chat_completion...") + + from tools.chat import create_chat_completion + from tools.base import auth_token_context + + token = auth_token_context.set(TEST_API_KEY) + try: + messages = [ + {"role": "user", "content": "Hello, how are you?"} + ] + result = await create_chat_completion( + model="anthropic/claude-3-opus", + messages=messages, + max_tokens=50, + temperature=0.7 + ) + logger.info(f"Chat completion result: {json.dumps(result, indent=2)}") + return result + finally: + auth_token_context.reset(token) + + +async def test_chat_completion_stream() -> Dict[str, Any]: + """Test the create_chat_completion_stream tool.""" + logger.info("Testing create_chat_completion_stream...") + + from tools.chat import create_chat_completion_stream + from tools.base import auth_token_context + + token = auth_token_context.set(TEST_API_KEY) + try: + messages = [ + {"role": "user", "content": "Write a short story about a robot in exactly 3 sentences."} + ] + result = await create_chat_completion_stream( + model="anthropic/claude-3-opus", + messages=messages, + max_tokens=72, + temperature=0.8 + ) + + logger.info("=== STREAMING CHAT COMPLETION TEST ===") + logger.info(f"Model: {result.get('model', 'Unknown')}") + logger.info(f"Stream enabled: {result.get('stream', False)}") + + if result.get('success'): + data = result.get('data', {}) + if data.get('stream') and data.get('generator'): + logger.info("āœ… Streaming generator received! Processing chunks in real-time...") + logger.info(f"Stream status: {data.get('message', 'No message')}") + + generator = data.get('generator') + all_chunks = [] + chunk_count = 0 + + logger.info("šŸš€ Starting to process stream chunks in real-time...") + + async for chunk_data in generator: + if chunk_data.get("is_complete"): + total_chunks = chunk_data.get("total_chunks", chunk_count) + logger.info(f"šŸŽÆ STREAM COMPLETED! Total chunks: {total_chunks}") + break + else: + chunk_content = chunk_data.get("chunk") + if chunk_content: + all_chunks.append(chunk_content) + chunk_count += 1 + logger.info(f"šŸ“¦ Chunk {chunk_count}: '{chunk_content}'") + + final_content = ''.join(all_chunks) + logger.info(f"šŸ“ Final content length: {len(final_content)}") + logger.info(f"šŸ“ Final content: {final_content}") + + usage = result.get('usage', {}) + logger.info(f"šŸ’³ Token usage: {usage}") + + elif data.get('choices'): + choices = data['choices'] + if choices: + content = choices[0].get('message', {}).get('content', '') + logger.info(f"Response content: {content}") + + usage = result.get('usage', {}) + logger.info(f"Token usage: {usage}") + else: + logger.info(f"Response data: {data}") + + logger.info("=== END STREAMING TEST ===") + return result + finally: + auth_token_context.reset(token) + + +async def test_user_profile() -> Dict[str, Any]: + """Test the get_user_profile tool.""" + logger.info("Testing get_user_profile...") + + from tools.usage import get_user_profile + from tools.base import auth_token_context + + token = auth_token_context.set(TEST_API_KEY) + try: + result = await get_user_profile() + logger.info(f"User profile result: {json.dumps(result, indent=2)}") + return result + finally: + auth_token_context.reset(token) + + +async def test_get_credits() -> Dict[str, Any]: + """Test the get_credits tool.""" + logger.info("Testing get_credits...") + + from tools.usage import get_credits + from tools.base import auth_token_context + + token = auth_token_context.set(TEST_API_KEY) + try: + result = await get_credits() + logger.info(f"Credits result: {json.dumps(result, indent=2)}") + return result + finally: + auth_token_context.reset(token) + + +async def test_model_comparison() -> Dict[str, Any]: + """Test the compare_models tool.""" + logger.info("Testing compare_models...") + + from tools.comparison import compare_models + from tools.base import auth_token_context + + token = auth_token_context.set(TEST_API_KEY) + try: + models = ["anthropic/claude-3-opus", "openai/gpt-4"] + result = await compare_models( + models=models, + test_prompt="Explain quantum computing in simple terms", + max_tokens=100, + temperature=0.7 + ) + logger.info(f"Model comparison result: {json.dumps(result, indent=2)}") + return result + finally: + auth_token_context.reset(token) + + +async def test_model_recommendations() -> Dict[str, Any]: + """Test the get_model_recommendations tool.""" + logger.info("Testing get_model_recommendations...") + + from tools.comparison import get_model_recommendations + from tools.base import auth_token_context + + token = auth_token_context.set(TEST_API_KEY) + try: + result = await get_model_recommendations( + use_case="Text generation for a chatbot", + budget_constraint="medium", + performance_priority="balanced" + ) + logger.info(f"Model recommendations result: {json.dumps(result, indent=2)}") + return result + finally: + auth_token_context.reset(token) + + +async def run_all_tests(): + """Run all tests.""" + logger.info("Starting OpenRouter MCP Server tests...") + + tests = [ + ("List Models", test_list_models), + ("Chat Completion", test_chat_completion), + ("Chat Completion Stream", test_chat_completion_stream), + ("User Profile", test_user_profile), + ("Get Credits", test_get_credits), + ("Model Comparison", test_model_comparison), + ("Model Recommendations", test_model_recommendations), + ] + + results = {} + + for test_name, test_func in tests: + try: + logger.info(f"\n{'='*50}") + logger.info(f"Running test: {test_name}") + logger.info(f"{'='*50}") + + result = await test_func() + results[test_name] = {"status": "PASSED", "result": result} + + except Exception as e: + logger.error(f"Test {test_name} FAILED: {str(e)}") + results[test_name] = {"status": "FAILED", "error": str(e)} + + # Print summary + logger.info(f"\n{'='*50}") + logger.info("TEST SUMMARY") + logger.info(f"{'='*50}") + + passed = 0 + failed = 0 + + for test_name, result in results.items(): + status = result["status"] + if status == "PASSED": + passed += 1 + logger.info(f"āœ… {test_name}: PASSED") + else: + failed += 1 + logger.error(f"āŒ {test_name}: FAILED - {result.get('error', 'Unknown error')}") + + logger.info(f"\nTotal: {passed + failed}, Passed: {passed}, Failed: {failed}") + + if failed == 0: + logger.info("šŸŽ‰ All tests passed!") + else: + logger.error(f"šŸ’„ {failed} test(s) failed!") + + return results + + +if __name__ == "__main__": + # Run the tests + asyncio.run(run_all_tests()) \ No newline at end of file diff --git a/mcp_servers/openrouter/tools/__init__.py b/mcp_servers/openrouter/tools/__init__.py new file mode 100644 index 00000000..0dbe8b8b --- /dev/null +++ b/mcp_servers/openrouter/tools/__init__.py @@ -0,0 +1,18 @@ +# OpenRouter MCP Server Tools + +from .base import OpenRouterToolExecutionError, auth_token_context +from .models import list_models +from .chat import create_chat_completion, create_chat_completion_stream +from .usage import get_usage, get_user_profile +from .comparison import compare_models + +__all__ = [ + "OpenRouterToolExecutionError", + "auth_token_context", + "list_models", + "create_chat_completion", + "create_chat_completion_stream", + "get_usage", + "get_user_profile", + "compare_models", +] \ No newline at end of file diff --git a/mcp_servers/openrouter/tools/base.py b/mcp_servers/openrouter/tools/base.py new file mode 100644 index 00000000..91c6bd33 --- /dev/null +++ b/mcp_servers/openrouter/tools/base.py @@ -0,0 +1,278 @@ +""" +Base utilities and error handling for OpenRouter MCP Server. +""" + +import contextvars +import logging +from typing import Optional, Dict, Any +import httpx +from pydantic import BaseModel, Field +import json + +logger = logging.getLogger(__name__) + +auth_token_context = contextvars.ContextVar("auth_token", default="") + +OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1" + + +class OpenRouterToolExecutionError(Exception): + """Custom exception for OpenRouter tool execution errors.""" + + def __init__( + self, + message: str, + retry_after_ms: Optional[int] = None, + additional_prompt_content: Optional[str] = None, + developer_message: Optional[str] = None, + ): + super().__init__(message) + self.retry_after_ms = retry_after_ms + self.additional_prompt_content = additional_prompt_content + self.developer_message = developer_message + + +class OpenRouterAPIError(Exception): + """Exception for OpenRouter API errors.""" + + def __init__(self, status_code: int, message: str, details: Optional[Dict[str, Any]] = None): + super().__init__(message) + self.status_code = status_code + self.details = details or {} + + +class OpenRouterClient: + """HTTP client for OpenRouter API.""" + + def __init__(self, api_key: str): + self.api_key = api_key + self.base_url = OPENROUTER_BASE_URL + self.headers = { + "Authorization": f"Bearer {api_key}", + "Content-Type": "application/json", + "HTTP-Referer": "https://klavis.ai/", + "X-Title": "Klavis MCP Server", + } + + async def _make_request( + self, + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: + """Make an HTTP request to the OpenRouter API.""" + url = f"{self.base_url}{endpoint}" + + async with httpx.AsyncClient(timeout=30.0) as client: + try: + response = await client.request( + method=method, + url=url, + headers=self.headers, + json=data, + params=params, + ) + + if response.status_code == 429: + retry_after = response.headers.get("Retry-After") + retry_after_ms = int(retry_after) * 1000 if retry_after else 60000 + raise OpenRouterToolExecutionError( + "Rate limit exceeded", + retry_after_ms=retry_after_ms, + additional_prompt_content="Please wait before making another request.", + developer_message=f"Rate limited. Retry after {retry_after_ms}ms", + ) + + if response.status_code >= 400: + error_data = response.json() if response.content else {} + error_message = error_data.get("error", {}).get("message", "Unknown error") + raise OpenRouterAPIError( + status_code=response.status_code, + message=error_message, + details=error_data, + ) + + if data and data.get("stream") and response.status_code == 200: + # For streaming responses, return a generator that yields chunks + async def stream_generator(): + try: + chunk_count = 0 + async for chunk in response.aiter_text(): + lines = chunk.split('\n') + for line in lines: + if line.startswith('data: '): + data_line = line[6:] + if data_line.strip() == '[DONE]': + logger.info("Received [DONE] marker - stream complete") + yield { + "chunk": None, + "is_complete": True, + "total_chunks": chunk_count, + "message": "Stream completed" + } + return + try: + json_data = json.loads(data_line) + if 'choices' in json_data and json_data['choices']: + delta = json_data['choices'][0].get('delta', {}) + if 'content' in delta: + chunk_count += 1 + content = delta['content'] + logger.info(f"Yielding chunk {chunk_count}: '{content}'") + yield { + "chunk": content, + "is_complete": False, + "chunk_number": chunk_count, + "message": f"Chunk {chunk_count} received" + } + except json.JSONDecodeError: + continue + + yield { + "chunk": None, + "is_complete": True, + "total_chunks": chunk_count, + "message": "Stream ended without [DONE] marker" + } + + except Exception as e: + logger.error(f"Error in stream generator: {e}") + yield { + "chunk": None, + "is_complete": True, + "error": str(e), + "message": f"Stream error: {str(e)}" + } + + return { + "stream": True, + "status_code": response.status_code, + "generator": stream_generator(), + "message": "Stream generator created" + } + + # Try to parse as JSON, but handle empty responses gracefully + if response.content: + return response.json() + else: + return {"status": "success", "message": "Empty response received"} + + except httpx.TimeoutException: + raise OpenRouterToolExecutionError( + "Request timeout", + retry_after_ms=5000, + additional_prompt_content="The request took too long. Please try again.", + developer_message="Request timed out after 30 seconds", + ) + except httpx.RequestError as e: + raise OpenRouterToolExecutionError( + f"Network error: {str(e)}", + retry_after_ms=5000, + additional_prompt_content="There was a network error. Please check your connection and try again.", + developer_message=f"Network error: {str(e)}", + ) + except Exception as e: + if "Expecting value" in str(e) and data and data.get("stream"): + return { + "stream": True, + "status_code": response.status_code, + "content": "Streaming response received", + "message": "This is a streaming response. Use appropriate streaming client to handle it." + } + raise + + async def get(self, endpoint: str, params: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + """Make a GET request.""" + return await self._make_request("GET", endpoint, params=params) + + async def post(self, endpoint: str, data: Dict[str, Any]) -> Dict[str, Any]: + """Make a POST request.""" + return await self._make_request("POST", endpoint, data=data) + + +def get_client() -> OpenRouterClient: + """Get an OpenRouter client with the current auth token.""" + api_key = auth_token_context.get() + if not api_key: + raise OpenRouterToolExecutionError( + "Missing API key", + additional_prompt_content="Please provide your OpenRouter API key in the x-auth-token header.", + developer_message="No API key provided in auth_token_context", + ) + return OpenRouterClient(api_key) + + +def validate_required_params(params: Dict[str, Any], required: list[str]) -> None: + """Validate that required parameters are present.""" + missing = [param for param in required if param not in params or params[param] is None] + if missing: + raise OpenRouterToolExecutionError( + f"Missing required parameters: {', '.join(missing)}", + additional_prompt_content=f"Please provide the following required parameters: {', '.join(missing)}", + developer_message=f"Missing required parameters: {missing}", + ) + + +def validate_model_id(model_id: str) -> None: + """Validate model ID format.""" + if not model_id or not isinstance(model_id, str): + raise OpenRouterToolExecutionError( + "Invalid model ID", + additional_prompt_content="Please provide a valid model ID (e.g., 'anthropic/claude-3-opus').", + developer_message="Model ID must be a non-empty string", + ) + + +def validate_messages(messages: list) -> None: + """Validate chat messages format.""" + if not messages or not isinstance(messages, list): + raise OpenRouterToolExecutionError( + "Invalid messages format", + additional_prompt_content="Messages must be a non-empty list of message objects.", + developer_message="Messages must be a non-empty list", + ) + + for i, message in enumerate(messages): + if not isinstance(message, dict): + raise OpenRouterToolExecutionError( + f"Invalid message format at index {i}", + additional_prompt_content="Each message must be an object with 'role' and 'content' fields.", + developer_message=f"Message at index {i} is not a dictionary", + ) + + if "role" not in message or "content" not in message: + raise OpenRouterToolExecutionError( + f"Missing required fields in message at index {i}", + additional_prompt_content="Each message must have 'role' and 'content' fields.", + developer_message=f"Message at index {i} missing required fields", + ) + + if message["role"] not in ["system", "user", "assistant"]: + raise OpenRouterToolExecutionError( + f"Invalid role '{message['role']}' in message at index {i}", + additional_prompt_content="Message role must be 'system', 'user', or 'assistant'.", + developer_message=f"Invalid role: {message['role']}", + ) + + +class ChatMessage(BaseModel): + """Chat message model.""" + role: str = Field(..., description="The role of the message sender") + content: str = Field(..., description="The content of the message") + + +class ChatCompletionRequest(BaseModel): + """Chat completion request model.""" + model: str = Field(..., description="The model to use for completion") + messages: list[ChatMessage] = Field(..., description="The messages to complete") + max_tokens: Optional[int] = Field(None, description="Maximum number of tokens to generate") + temperature: Optional[float] = Field(None, ge=0.0, le=2.0, description="Sampling temperature") + top_p: Optional[float] = Field(None, ge=0.0, le=1.0, description="Nucleus sampling parameter") + n: Optional[int] = Field(None, ge=1, le=10, description="Number of completions to generate") + stream: Optional[bool] = Field(False, description="Whether to stream the response") + stop: Optional[list[str]] = Field(None, description="Stop sequences") + presence_penalty: Optional[float] = Field(None, ge=-2.0, le=2.0, description="Presence penalty") + frequency_penalty: Optional[float] = Field(None, ge=-2.0, le=2.0, description="Frequency penalty") + logit_bias: Optional[Dict[str, float]] = Field(None, description="Logit bias") + user: Optional[str] = Field(None, description="User identifier") \ No newline at end of file diff --git a/mcp_servers/openrouter/tools/chat.py b/mcp_servers/openrouter/tools/chat.py new file mode 100644 index 00000000..5de9151c --- /dev/null +++ b/mcp_servers/openrouter/tools/chat.py @@ -0,0 +1,411 @@ +""" +Chat completion tools for OpenRouter MCP Server. +""" + +import logging +from typing import Dict, Any, Optional, List +from .base import ( + get_client, + validate_required_params, + validate_model_id, + validate_messages, + OpenRouterToolExecutionError, +) + +logger = logging.getLogger(__name__) + + +async def create_chat_completion( + model: str, + messages: List[Dict[str, str]], + max_tokens: Optional[int] = None, + temperature: Optional[float] = None, + top_p: Optional[float] = None, + n: Optional[int] = None, + stream: bool = False, + stop: Optional[List[str]] = None, + presence_penalty: Optional[float] = None, + frequency_penalty: Optional[float] = None, + logit_bias: Optional[Dict[str, float]] = None, + user: Optional[str] = None, +) -> Dict[str, Any]: + """ + Create a chat completion using OpenRouter. + + Args: + model: The model to use for completion + messages: List of message objects with 'role' and 'content' + max_tokens: Maximum number of tokens to generate + temperature: Sampling temperature (0.0 to 2.0) + top_p: Nucleus sampling parameter (0.0 to 1.0) + n: Number of completions to generate (1 to 10) + stream: Whether to stream the response + stop: Stop sequences + presence_penalty: Presence penalty (-2.0 to 2.0) + frequency_penalty: Frequency penalty (-2.0 to 2.0) + logit_bias: Logit bias dictionary + user: User identifier + + Returns: + Dictionary containing the completion response + """ + try: + validate_required_params({"model": model, "messages": messages}, ["model", "messages"]) + validate_model_id(model) + validate_messages(messages) + + if temperature is not None and (temperature < 0.0 or temperature > 2.0): + raise OpenRouterToolExecutionError( + "Invalid temperature parameter", + additional_prompt_content="Temperature must be between 0.0 and 2.0.", + developer_message=f"Invalid temperature: {temperature}", + ) + + if top_p is not None and (top_p < 0.0 or top_p > 1.0): + raise OpenRouterToolExecutionError( + "Invalid top_p parameter", + additional_prompt_content="Top_p must be between 0.0 and 1.0.", + developer_message=f"Invalid top_p: {top_p}", + ) + + if n is not None and (n < 1 or n > 10): + raise OpenRouterToolExecutionError( + "Invalid n parameter", + additional_prompt_content="N must be between 1 and 10.", + developer_message=f"Invalid n: {n}", + ) + + if presence_penalty is not None and (presence_penalty < -2.0 or presence_penalty > 2.0): + raise OpenRouterToolExecutionError( + "Invalid presence_penalty parameter", + additional_prompt_content="Presence penalty must be between -2.0 and 2.0.", + developer_message=f"Invalid presence_penalty: {presence_penalty}", + ) + + if frequency_penalty is not None and (frequency_penalty < -2.0 or frequency_penalty > 2.0): + raise OpenRouterToolExecutionError( + "Invalid frequency_penalty parameter", + additional_prompt_content="Frequency penalty must be between -2.0 and 2.0.", + developer_message=f"Invalid frequency_penalty: {frequency_penalty}", + ) + + client = get_client() + + request_data = { + "model": model, + "messages": messages, + } + + if max_tokens is not None: + request_data["max_tokens"] = max_tokens + if temperature is not None: + request_data["temperature"] = temperature + if top_p is not None: + request_data["top_p"] = top_p + if n is not None: + request_data["n"] = n + if stream: + request_data["stream"] = stream + if stop: + request_data["stop"] = stop + if presence_penalty is not None: + request_data["presence_penalty"] = presence_penalty + if frequency_penalty is not None: + request_data["frequency_penalty"] = frequency_penalty + if logit_bias: + request_data["logit_bias"] = logit_bias + if user: + request_data["user"] = user + + response = await client.post("/chat/completions", request_data) + + logger.info(f"Successfully created chat completion with model: {model}") + + return { + "success": True, + "data": response, + "model": model, + "usage": response.get("usage", {}), + "choices": response.get("choices", []), + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error creating chat completion: {e}") + raise OpenRouterToolExecutionError( + f"Failed to create chat completion: {str(e)}", + additional_prompt_content="There was an error creating the chat completion. Please check your parameters and try again.", + developer_message=f"Unexpected error: {str(e)}", + ) + + +async def create_chat_completion_stream( + model: str, + messages: List[Dict[str, str]], + max_tokens: Optional[int] = None, + temperature: Optional[float] = None, + top_p: Optional[float] = None, + stop: Optional[List[str]] = None, + presence_penalty: Optional[float] = None, + frequency_penalty: Optional[float] = None, + logit_bias: Optional[Dict[str, float]] = None, + user: Optional[str] = None, +) -> Dict[str, Any]: + """ + Create a streaming chat completion using OpenRouter. + + Args: + model: The model to use for completion + messages: List of message objects with 'role' and 'content' + max_tokens: Maximum number of tokens to generate + temperature: Sampling temperature (0.0 to 2.0) + top_p: Nucleus sampling parameter (0.0 to 1.0) + stop: Stop sequences + presence_penalty: Presence penalty (-2.0 to 2.0) + frequency_penalty: Frequency penalty (-2.0 to 2.0) + logit_bias: Logit bias dictionary + user: User identifier + + Returns: + Dictionary containing the streaming completion response + """ + try: + validate_required_params({"model": model, "messages": messages}, ["model", "messages"]) + validate_model_id(model) + validate_messages(messages) + + if temperature is not None and (temperature < 0.0 or temperature > 2.0): + raise OpenRouterToolExecutionError( + "Invalid temperature parameter", + additional_prompt_content="Temperature must be between 0.0 and 2.0.", + developer_message=f"Invalid temperature: {temperature}", + ) + + if top_p is not None and (top_p < 0.0 or top_p > 1.0): + raise OpenRouterToolExecutionError( + "Invalid top_p parameter", + additional_prompt_content="Top_p must be between 0.0 and 1.0.", + developer_message=f"Invalid top_p: {top_p}", + ) + + if presence_penalty is not None and (presence_penalty < -2.0 or presence_penalty > 2.0): + raise OpenRouterToolExecutionError( + "Invalid presence_penalty parameter", + additional_prompt_content="Presence penalty must be between -2.0 and 2.0.", + developer_message=f"Invalid presence_penalty: {presence_penalty}", + ) + + if frequency_penalty is not None and (frequency_penalty < -2.0 or frequency_penalty > 2.0): + raise OpenRouterToolExecutionError( + "Invalid frequency_penalty parameter", + additional_prompt_content="Frequency penalty must be between -2.0 and 2.0.", + developer_message=f"Invalid frequency_penalty: {frequency_penalty}", + ) + + client = get_client() + + request_data = { + "model": model, + "messages": messages, + "stream": True, + } + + if max_tokens is not None: + request_data["max_tokens"] = max_tokens + if temperature is not None: + request_data["temperature"] = temperature + if top_p is not None: + request_data["top_p"] = top_p + if stop: + request_data["stop"] = stop + if presence_penalty is not None: + request_data["presence_penalty"] = presence_penalty + if frequency_penalty is not None: + request_data["frequency_penalty"] = frequency_penalty + if logit_bias: + request_data["logit_bias"] = logit_bias + if user: + request_data["user"] = user + + response = await client.post("/chat/completions", request_data) + + logger.info(f"Successfully created streaming chat completion with model: {model}") + + if response.get("stream"): + logger.info("Returning streaming generator to caller...") + + # Get the stream generator and return it directly + generator = response.get("generator") + if generator: + return { + "success": True, + "data": { + "stream": True, + "generator": generator, + "message": "Stream generator ready for processing" + }, + "model": model, + "stream": True, + "usage": None, + "choices": [], + } + else: + return { + "success": True, + "data": response, + "model": model, + "stream": True, + "usage": response.get("usage", {}), + "choices": response.get("choices", []), + } + else: + return { + "success": True, + "data": response, + "model": model, + "stream": True, + "usage": response.get("usage", {}), + "choices": response.get("choices", []), + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error creating streaming chat completion: {e}") + raise OpenRouterToolExecutionError( + f"Failed to create streaming chat completion: {str(e)}", + additional_prompt_content="There was an error creating the streaming chat completion. Please check your parameters and try again.", + developer_message=f"Unexpected error: {str(e)}", + ) + + +async def create_completion( + model: str, + prompt: str, + max_tokens: Optional[int] = None, + temperature: Optional[float] = None, + top_p: Optional[float] = None, + n: Optional[int] = None, + stream: bool = False, + stop: Optional[List[str]] = None, + presence_penalty: Optional[float] = None, + frequency_penalty: Optional[float] = None, + logit_bias: Optional[Dict[str, float]] = None, + user: Optional[str] = None, +) -> Dict[str, Any]: + """ + Create a text completion using OpenRouter (legacy completion endpoint). + + Args: + model: The model to use for completion + prompt: The text prompt to complete + max_tokens: Maximum number of tokens to generate + temperature: Sampling temperature (0.0 to 2.0) + top_p: Nucleus sampling parameter (0.0 to 1.0) + n: Number of completions to generate (1 to 10) + stream: Whether to stream the response + stop: Stop sequences + presence_penalty: Presence penalty (-2.0 to 2.0) + frequency_penalty: Frequency penalty (-2.0 to 2.0) + logit_bias: Logit bias dictionary + user: User identifier + + Returns: + Dictionary containing the completion response + """ + try: + validate_required_params({"model": model, "prompt": prompt}, ["model", "prompt"]) + validate_model_id(model) + + if not prompt or not isinstance(prompt, str): + raise OpenRouterToolExecutionError( + "Invalid prompt parameter", + additional_prompt_content="Prompt must be a non-empty string.", + developer_message="Prompt must be a non-empty string", + ) + + if temperature is not None and (temperature < 0.0 or temperature > 2.0): + raise OpenRouterToolExecutionError( + "Invalid temperature parameter", + additional_prompt_content="Temperature must be between 0.0 and 2.0.", + developer_message=f"Invalid temperature: {temperature}", + ) + + if top_p is not None and (top_p < 0.0 or top_p > 1.0): + raise OpenRouterToolExecutionError( + "Invalid top_p parameter", + additional_prompt_content="Top_p must be between 0.0 and 1.0.", + developer_message=f"Invalid top_p: {top_p}", + ) + + if n is not None and (n < 1 or n > 10): + raise OpenRouterToolExecutionError( + "Invalid n parameter", + additional_prompt_content="N must be between 1 and 10.", + developer_message=f"Invalid n: {n}", + ) + + if presence_penalty is not None and (presence_penalty < -2.0 or presence_penalty > 2.0): + raise OpenRouterToolExecutionError( + "Invalid presence_penalty parameter", + additional_prompt_content="Presence penalty must be between -2.0 and 2.0.", + developer_message=f"Invalid presence_penalty: {presence_penalty}", + ) + + if frequency_penalty is not None and (frequency_penalty < -2.0 or frequency_penalty > 2.0): + raise OpenRouterToolExecutionError( + "Invalid frequency_penalty parameter", + additional_prompt_content="Frequency penalty must be between -2.0 and 2.0.", + developer_message=f"Invalid frequency_penalty: {frequency_penalty}", + ) + + client = get_client() + + request_data = { + "model": model, + "prompt": prompt, + } + + if max_tokens is not None: + request_data["max_tokens"] = max_tokens + if temperature is not None: + request_data["temperature"] = temperature + if top_p is not None: + request_data["top_p"] = top_p + if n is not None: + request_data["n"] = n + if stream: + request_data["stream"] = stream + if stop: + request_data["stop"] = stop + if presence_penalty is not None: + request_data["presence_penalty"] = presence_penalty + if frequency_penalty is not None: + request_data["frequency_penalty"] = frequency_penalty + if logit_bias: + request_data["logit_bias"] = logit_bias + if user: + request_data["user"] = user + + response = await client.post("/completions", request_data) + + logger.info(f"Successfully created completion with model: {model}") + + return { + "success": True, + "data": response, + "model": model, + "usage": response.get("usage", {}), + "choices": response.get("choices", []), + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error creating completion: {e}") + raise OpenRouterToolExecutionError( + f"Failed to create completion: {str(e)}", + additional_prompt_content="There was an error creating the completion. Please check your parameters and try again.", + developer_message=f"Unexpected error: {str(e)}", + ) \ No newline at end of file diff --git a/mcp_servers/openrouter/tools/comparison.py b/mcp_servers/openrouter/tools/comparison.py new file mode 100644 index 00000000..ee2bb727 --- /dev/null +++ b/mcp_servers/openrouter/tools/comparison.py @@ -0,0 +1,425 @@ +""" +Model comparison and analysis tools for OpenRouter MCP Server. +""" + +import logging +from typing import Dict, Any, List, Optional +from .base import get_client, validate_required_params, validate_model_id, OpenRouterToolExecutionError + +logger = logging.getLogger(__name__) + + +async def compare_models( + models: List[str], + test_prompt: str, + max_tokens: Optional[int] = 100, + temperature: Optional[float] = 0.7, +) -> Dict[str, Any]: + """ + Compare multiple models by running the same prompt through each. + + Args: + models: List of model IDs to compare + test_prompt: The prompt to test with all models + max_tokens: Maximum tokens to generate (default 100) + temperature: Sampling temperature (default 0.7) + + Returns: + Dictionary containing comparison results + """ + try: + validate_required_params({"models": models, "test_prompt": test_prompt}, ["models", "test_prompt"]) + + if not models or not isinstance(models, list): + raise OpenRouterToolExecutionError( + "Invalid models parameter", + additional_prompt_content="Models must be a non-empty list of model IDs.", + developer_message="Models must be a non-empty list", + ) + + if len(models) < 2: + raise OpenRouterToolExecutionError( + "At least 2 models required for comparison", + additional_prompt_content="Please provide at least 2 models to compare.", + developer_message="Need at least 2 models for comparison", + ) + + if len(models) > 5: + raise OpenRouterToolExecutionError( + "Too many models for comparison", + additional_prompt_content="Please provide no more than 5 models to compare.", + developer_message="Too many models for comparison", + ) + + for model_id in models: + validate_model_id(model_id) + + if not test_prompt or not isinstance(test_prompt, str): + raise OpenRouterToolExecutionError( + "Invalid test_prompt parameter", + additional_prompt_content="Test prompt must be a non-empty string.", + developer_message="Test prompt must be a non-empty string", + ) + + if temperature is not None and (temperature < 0.0 or temperature > 2.0): + raise OpenRouterToolExecutionError( + "Invalid temperature parameter", + additional_prompt_content="Temperature must be between 0.0 and 2.0.", + developer_message=f"Invalid temperature: {temperature}", + ) + + client = get_client() + + messages = [{"role": "user", "content": test_prompt}] + + results = [] + for model_id in models: + try: + request_data = { + "model": model_id, + "messages": messages, + "max_tokens": max_tokens or 100, + "temperature": temperature or 0.7, + } + + response = await client.post("/chat/completions", request_data) + + choices = response.get("choices", []) + content = choices[0].get("message", {}).get("content", "") if choices else "" + usage = response.get("usage", {}) + + results.append({ + "model": model_id, + "success": True, + "response": content, + "usage": { + "prompt_tokens": usage.get("prompt_tokens", 0), + "completion_tokens": usage.get("completion_tokens", 0), + "total_tokens": usage.get("total_tokens", 0), + }, + "error": None, + }) + + logger.info(f"Successfully tested model: {model_id}") + + except Exception as e: + logger.warning(f"Error testing model {model_id}: {e}") + results.append({ + "model": model_id, + "success": False, + "response": None, + "usage": None, + "error": str(e), + }) + + successful_results = [r for r in results if r["success"]] + + comparison_summary = { + "total_models": len(models), + "successful_tests": len(successful_results), + "failed_tests": len(results) - len(successful_results), + "test_prompt": test_prompt, + "parameters": { + "max_tokens": max_tokens or 100, + "temperature": temperature or 0.7, + }, + } + + if successful_results: + total_tokens = [r["usage"]["total_tokens"] for r in successful_results] + completion_tokens = [r["usage"]["completion_tokens"] for r in successful_results] + + comparison_summary["token_usage"] = { + "min_total_tokens": min(total_tokens), + "max_total_tokens": max(total_tokens), + "avg_total_tokens": sum(total_tokens) / len(total_tokens), + "min_completion_tokens": min(completion_tokens), + "max_completion_tokens": max(completion_tokens), + "avg_completion_tokens": sum(completion_tokens) / len(completion_tokens), + } + + logger.info(f"Successfully compared {len(successful_results)} models") + + return { + "success": True, + "comparison_summary": comparison_summary, + "results": results, + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error comparing models: {e}") + raise OpenRouterToolExecutionError( + f"Failed to compare models: {str(e)}", + additional_prompt_content="There was an error comparing the models. Please try again.", + developer_message=f"Unexpected error: {str(e)}", + ) + + +async def analyze_model_performance( + model: str, + test_prompts: List[str], + max_tokens: Optional[int] = 100, + temperature: Optional[float] = 0.7, +) -> Dict[str, Any]: + """ + Analyze the performance of a single model across multiple test prompts. + + Args: + model: The model ID to analyze + test_prompts: List of test prompts to use + max_tokens: Maximum tokens to generate (default 100) + temperature: Sampling temperature (default 0.7) + + Returns: + Dictionary containing performance analysis + """ + try: + validate_required_params({"model": model, "test_prompts": test_prompts}, ["model", "test_prompts"]) + validate_model_id(model) + + if not test_prompts or not isinstance(test_prompts, list): + raise OpenRouterToolExecutionError( + "Invalid test_prompts parameter", + additional_prompt_content="Test prompts must be a non-empty list of strings.", + developer_message="Test prompts must be a non-empty list", + ) + + if len(test_prompts) > 10: + raise OpenRouterToolExecutionError( + "Too many test prompts", + additional_prompt_content="Please provide no more than 10 test prompts.", + developer_message="Too many test prompts", + ) + + for i, prompt in enumerate(test_prompts): + if not prompt or not isinstance(prompt, str): + raise OpenRouterToolExecutionError( + f"Invalid prompt at index {i}", + additional_prompt_content=f"Prompt at index {i} must be a non-empty string.", + developer_message=f"Invalid prompt at index {i}", + ) + + if temperature is not None and (temperature < 0.0 or temperature > 2.0): + raise OpenRouterToolExecutionError( + "Invalid temperature parameter", + additional_prompt_content="Temperature must be between 0.0 and 2.0.", + developer_message=f"Invalid temperature: {temperature}", + ) + + client = get_client() + + results = [] + total_prompt_tokens = 0 + total_completion_tokens = 0 + total_total_tokens = 0 + + for i, prompt in enumerate(test_prompts): + try: + messages = [{"role": "user", "content": prompt}] + + request_data = { + "model": model, + "messages": messages, + "max_tokens": max_tokens or 100, + "temperature": temperature or 0.7, + } + + response = await client.post("/chat/completions", request_data) + + choices = response.get("choices", []) + content = choices[0].get("message", {}).get("content", "") if choices else "" + usage = response.get("usage", {}) + + prompt_tokens = usage.get("prompt_tokens", 0) + completion_tokens = usage.get("completion_tokens", 0) + total_tokens = usage.get("total_tokens", 0) + + total_prompt_tokens += prompt_tokens + total_completion_tokens += completion_tokens + total_total_tokens += total_tokens + + results.append({ + "prompt_index": i, + "prompt": prompt, + "success": True, + "response": content, + "usage": { + "prompt_tokens": prompt_tokens, + "completion_tokens": completion_tokens, + "total_tokens": total_tokens, + }, + "error": None, + }) + + logger.info(f"Successfully tested prompt {i+1}/{len(test_prompts)} with model: {model}") + + except Exception as e: + logger.warning(f"Error testing prompt {i} with model {model}: {e}") + results.append({ + "prompt_index": i, + "prompt": prompt, + "success": False, + "response": None, + "usage": None, + "error": str(e), + }) + + successful_results = [r for r in results if r["success"]] + + performance_summary = { + "model": model, + "total_prompts": len(test_prompts), + "successful_tests": len(successful_results), + "failed_tests": len(results) - len(successful_results), + "success_rate": len(successful_results) / len(test_prompts) if test_prompts else 0, + "parameters": { + "max_tokens": max_tokens or 100, + "temperature": temperature or 0.7, + }, + "total_usage": { + "prompt_tokens": total_prompt_tokens, + "completion_tokens": total_completion_tokens, + "total_tokens": total_total_tokens, + }, + } + + if successful_results: + avg_prompt_tokens = total_prompt_tokens / len(successful_results) + avg_completion_tokens = total_completion_tokens / len(successful_results) + avg_total_tokens = total_total_tokens / len(successful_results) + + performance_summary["average_usage"] = { + "prompt_tokens": avg_prompt_tokens, + "completion_tokens": avg_completion_tokens, + "total_tokens": avg_total_tokens, + } + + logger.info(f"Successfully analyzed model {model} performance across {len(successful_results)} prompts") + + return { + "success": True, + "performance_summary": performance_summary, + "results": results, + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error analyzing model performance: {e}") + raise OpenRouterToolExecutionError( + f"Failed to analyze model performance: {str(e)}", + additional_prompt_content="There was an error analyzing the model performance. Please try again.", + developer_message=f"Unexpected error: {str(e)}", + ) + + +async def get_model_recommendations( + use_case: str, + budget_constraint: Optional[str] = None, + performance_priority: Optional[str] = None, +) -> Dict[str, Any]: + """ + Get model recommendations based on use case and constraints. + + Args: + use_case: Description of the intended use case + budget_constraint: Budget constraint ('low', 'medium', 'high', 'unlimited') + performance_priority: Performance priority ('speed', 'quality', 'balanced') + + Returns: + Dictionary containing model recommendations + """ + try: + validate_required_params({"use_case": use_case}, ["use_case"]) + + if not use_case or not isinstance(use_case, str): + raise OpenRouterToolExecutionError( + "Invalid use_case parameter", + additional_prompt_content="Use case must be a non-empty string describing your needs.", + developer_message="Use case must be a non-empty string", + ) + + valid_budgets = ["low", "medium", "high", "unlimited"] + if budget_constraint and budget_constraint not in valid_budgets: + raise OpenRouterToolExecutionError( + "Invalid budget_constraint parameter", + additional_prompt_content=f"Budget constraint must be one of: {', '.join(valid_budgets)}.", + developer_message=f"Invalid budget_constraint: {budget_constraint}", + ) + + valid_priorities = ["speed", "quality", "balanced"] + if performance_priority and performance_priority not in valid_priorities: + raise OpenRouterToolExecutionError( + "Invalid performance_priority parameter", + additional_prompt_content=f"Performance priority must be one of: {', '.join(valid_priorities)}.", + developer_message=f"Invalid performance_priority: {performance_priority}", + ) + + client = get_client() + + models_response = await client.get("/models") + all_models = models_response.get("data", []) + + recommendations = [] + + for model in all_models: + model_id = model.get("id", "") + pricing = model.get("pricing", {}) + input_cost = pricing.get("input", 0) + output_cost = pricing.get("output", 0) + total_cost = input_cost + output_cost + + if budget_constraint: + if budget_constraint == "low" and total_cost > 0.001: + continue + elif budget_constraint == "medium" and total_cost > 0.01: + continue + elif budget_constraint == "high" and total_cost > 0.1: + continue + + if performance_priority: + model_name = model_id.lower() + if performance_priority == "speed" and "gpt-4" in model_name: + continue + elif performance_priority == "quality" and "gpt-3.5" in model_name: + continue + + recommendations.append({ + "model_id": model_id, + "name": model.get("name", model_id), + "description": model.get("description", ""), + "pricing": { + "input_cost_per_1k": input_cost, + "output_cost_per_1k": output_cost, + "total_cost_per_1k": total_cost, + }, + "context_length": model.get("context_length", 0), + "category": model.get("category", ""), + }) + + recommendations.sort(key=lambda x: x["pricing"]["total_cost_per_1k"]) + + recommendations = recommendations[:10] + + logger.info(f"Generated {len(recommendations)} model recommendations for use case: {use_case}") + + return { + "success": True, + "use_case": use_case, + "budget_constraint": budget_constraint, + "performance_priority": performance_priority, + "recommendations": recommendations, + "total_models_considered": len(all_models), + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error getting model recommendations: {e}") + raise OpenRouterToolExecutionError( + f"Failed to get model recommendations: {str(e)}", + additional_prompt_content="There was an error generating model recommendations. Please try again.", + developer_message=f"Unexpected error: {str(e)}", + ) \ No newline at end of file diff --git a/mcp_servers/openrouter/tools/models.py b/mcp_servers/openrouter/tools/models.py new file mode 100644 index 00000000..4ed98a09 --- /dev/null +++ b/mcp_servers/openrouter/tools/models.py @@ -0,0 +1,172 @@ +""" +Model management tools for OpenRouter MCP Server. +""" + +import logging +from typing import Dict, Any, Optional +from .base import get_client, validate_required_params, validate_model_id, OpenRouterToolExecutionError + +logger = logging.getLogger(__name__) + + +async def list_models(limit: Optional[int] = 50, next_page_token: Optional[str] = None) -> Dict[str, Any]: + """ + List available models on OpenRouter. + + Args: + limit: Maximum number of models to return (1-100, default 50) + next_page_token: Token for pagination + + Returns: + Dictionary containing models and pagination info + """ + try: + if limit is not None and (limit < 1 or limit > 100): + raise OpenRouterToolExecutionError( + "Invalid limit parameter", + additional_prompt_content="Limit must be between 1 and 100.", + developer_message=f"Invalid limit: {limit}", + ) + + client = get_client() + + params = {} + if limit is not None: + params["limit"] = limit + if next_page_token: + params["after"] = next_page_token + + response = await client.get("/models", params=params) + + logger.info(f"Successfully retrieved {len(response.get('data', []))} models") + + return { + "success": True, + "data": response.get("data", []), + "pagination": response.get("pagination", {}), + "total_count": len(response.get("data", [])), + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error listing models: {e}") + raise OpenRouterToolExecutionError( + f"Failed to list models: {str(e)}", + additional_prompt_content="There was an error retrieving the models. Please try again.", + developer_message=f"Unexpected error: {str(e)}", + ) + +async def search_models( + query: str, + limit: Optional[int] = 20, + category: Optional[str] = None, + provider: Optional[str] = None, +) -> Dict[str, Any]: + """ + Search for models based on various criteria. + + Args: + query: Search query string + limit: Maximum number of models to return (1-100, default 20) + category: Filter by model category (e.g., 'chat', 'completion', 'embedding') + provider: Filter by provider (e.g., 'anthropic', 'openai', 'meta-llama') + + Returns: + Dictionary containing matching models + """ + try: + validate_required_params({"query": query}, ["query"]) + + if limit is not None and (limit < 1 or limit > 100): + raise OpenRouterToolExecutionError( + "Invalid limit parameter", + additional_prompt_content="Limit must be between 1 and 100.", + developer_message=f"Invalid limit: {limit}", + ) + + client = get_client() + + all_models_response = await client.get("/models") + all_models = all_models_response.get("data", []) + + filtered_models = [] + query_lower = query.lower() + + for model in all_models: + model_name = model.get("id", "").lower() + model_description = model.get("description", "").lower() + + if query_lower in model_name or query_lower in model_description: + if category and model.get("category") != category: + continue + if provider and not model.get("id", "").startswith(f"{provider}/"): + continue + + filtered_models.append(model) + + if limit and len(filtered_models) >= limit: + break + + logger.info(f"Found {len(filtered_models)} models matching query: {query}") + + return { + "success": True, + "data": filtered_models, + "query": query, + "category": category, + "provider": provider, + "total_count": len(filtered_models), + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error searching models: {e}") + raise OpenRouterToolExecutionError( + f"Failed to search models: {str(e)}", + additional_prompt_content="There was an error searching for models. Please try again.", + developer_message=f"Unexpected error: {str(e)}", + ) + + +async def get_model_pricing(model_id: str) -> Dict[str, Any]: + """ + Get pricing information for a specific model. + + Args: + model_id: The ID of the model to get pricing for + + Returns: + Dictionary containing pricing information + """ + try: + validate_required_params({"model_id": model_id}, ["model_id"]) + validate_model_id(model_id) + + client = get_client() + + response = await client.get(f"/models/{model_id}") + + pricing = response.get("pricing", {}) + + logger.info(f"Successfully retrieved pricing for model: {model_id}") + + return { + "success": True, + "model_id": model_id, + "pricing": pricing, + "input_cost_per_1k_tokens": pricing.get("input", 0), + "output_cost_per_1k_tokens": pricing.get("output", 0), + "currency": "USD", + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error getting pricing for model {model_id}: {e}") + raise OpenRouterToolExecutionError( + f"Failed to get pricing for model {model_id}: {str(e)}", + additional_prompt_content=f"There was an error retrieving pricing for model {model_id}. Please check the model ID and try again.", + developer_message=f"Unexpected error getting pricing for model {model_id}: {str(e)}", + ) \ No newline at end of file diff --git a/mcp_servers/openrouter/tools/usage.py b/mcp_servers/openrouter/tools/usage.py new file mode 100644 index 00000000..a023a808 --- /dev/null +++ b/mcp_servers/openrouter/tools/usage.py @@ -0,0 +1,283 @@ +""" +Usage tracking and user profile tools for OpenRouter MCP Server. +""" + +import logging +from typing import Dict, Any, Optional +from datetime import datetime, date +from .base import get_client, validate_required_params, OpenRouterToolExecutionError + +logger = logging.getLogger(__name__) + + +async def get_usage( + start_date: Optional[str] = None, + end_date: Optional[str] = None, + limit: Optional[int] = 100, +) -> Dict[str, Any]: + """ + Get usage statistics for the authenticated user. + + Args: + start_date: Start date in YYYY-MM-DD format (defaults to 30 days ago) + end_date: End date in YYYY-MM-DD format (defaults to today) + limit: Maximum number of records to return (1-1000, default 100) + + Returns: + Dictionary containing usage statistics + """ + try: + if limit is not None and (limit < 1 or limit > 1000): + raise OpenRouterToolExecutionError( + "Invalid limit parameter", + additional_prompt_content="Limit must be between 1 and 1000.", + developer_message=f"Invalid limit: {limit}", + ) + + if start_date: + try: + datetime.strptime(start_date, "%Y-%m-%d") + except ValueError: + raise OpenRouterToolExecutionError( + "Invalid start_date format", + additional_prompt_content="Start date must be in YYYY-MM-DD format.", + developer_message=f"Invalid start_date format: {start_date}", + ) + + if end_date: + try: + datetime.strptime(end_date, "%Y-%m-%d") + except ValueError: + raise OpenRouterToolExecutionError( + "Invalid end_date format", + additional_prompt_content="End date must be in YYYY-MM-DD format.", + developer_message=f"Invalid end_date format: {end_date}", + ) + + client = get_client() + + params = {} + if start_date: + params["start_date"] = start_date + if end_date: + params["end_date"] = end_date + if limit is not None: + params["limit"] = limit + + response = await client.get("/auth/key", params=params) + + logger.info(f"Successfully retrieved usage statistics") + + return { + "success": True, + "data": response, + "start_date": start_date, + "end_date": end_date, + "usage_summary": { + "total_requests": response.get("total_requests", 0), + "total_tokens": response.get("total_tokens", 0), + "total_cost": response.get("total_cost", 0), + "currency": "USD", + }, + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error getting usage statistics: {e}") + raise OpenRouterToolExecutionError( + f"Failed to get usage statistics: {str(e)}", + additional_prompt_content="There was an error retrieving usage statistics. Please try again.", + developer_message=f"Unexpected error: {str(e)}", + ) + + +async def get_user_profile() -> Dict[str, Any]: + """ + Get the current user's profile information. + + Returns: + Dictionary containing user profile information + """ + try: + client = get_client() + + response = await client.get("/auth/key") + + logger.info("Successfully retrieved user profile") + + return { + "success": True, + "data": response, + "user_info": { + "user_id": response.get("user_id"), + "email": response.get("email"), + "name": response.get("name"), + "credits": response.get("credits", 0), + "plan": response.get("plan", "free"), + "created_at": response.get("created_at"), + }, + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error getting user profile: {e}") + raise OpenRouterToolExecutionError( + f"Failed to get user profile: {str(e)}", + additional_prompt_content="There was an error retrieving your profile information. Please try again.", + developer_message=f"Unexpected error: {str(e)}", + ) + + +async def get_credits() -> Dict[str, Any]: + """ + Get the current user's credit balance. + + Returns: + Dictionary containing credit information with total_credits and total_usage + """ + try: + client = get_client() + + # Use the correct endpoint as per OpenRouter API documentation + response = await client.get("/credits") + + # Extract data from response according to API spec + data = response.get("data", {}) + total_credits = data.get("total_credits", 0) + total_usage = data.get("total_usage", 0) + + logger.info(f"Successfully retrieved credit balance: {total_credits} credits, {total_usage} used") + + return { + "success": True, + "total_credits": total_credits, + "total_usage": total_usage, + "available_credits": total_credits - total_usage, + "currency": "USD", + "data": response, + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error getting credits: {e}") + raise OpenRouterToolExecutionError( + f"Failed to get credits: {str(e)}", + additional_prompt_content="There was an error retrieving your credit balance. Please try again.", + developer_message=f"Unexpected error: {str(e)}", + ) + + +async def get_api_key_info() -> Dict[str, Any]: + """ + Get information about the current API key. + + Returns: + Dictionary containing API key information + """ + try: + client = get_client() + + response = await client.get("/auth/key") + + logger.info("Successfully retrieved API key information") + + return { + "success": True, + "data": response, + "key_info": { + "key_id": response.get("id"), + "name": response.get("name"), + "created_at": response.get("created_at"), + "last_used": response.get("last_used"), + "permissions": response.get("permissions", []), + "is_active": response.get("is_active", True), + }, + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error getting API key info: {e}") + raise OpenRouterToolExecutionError( + f"Failed to get API key information: {str(e)}", + additional_prompt_content="There was an error retrieving API key information. Please try again.", + developer_message=f"Unexpected error: {str(e)}", + ) + + +async def get_cost_estimate( + model: str, + input_tokens: int, + output_tokens: Optional[int] = None, +) -> Dict[str, Any]: + """ + Estimate the cost for a specific model and token usage. + + Args: + model: The model ID to estimate costs for + input_tokens: Number of input tokens + output_tokens: Number of output tokens (optional, defaults to 0) + + Returns: + Dictionary containing cost estimate + """ + try: + validate_required_params({"model": model, "input_tokens": input_tokens}, ["model", "input_tokens"]) + + if input_tokens < 0: + raise OpenRouterToolExecutionError( + "Invalid input_tokens parameter", + additional_prompt_content="Input tokens must be a non-negative number.", + developer_message=f"Invalid input_tokens: {input_tokens}", + ) + + if output_tokens is not None and output_tokens < 0: + raise OpenRouterToolExecutionError( + "Invalid output_tokens parameter", + additional_prompt_content="Output tokens must be a non-negative number.", + developer_message=f"Invalid output_tokens: {output_tokens}", + ) + + client = get_client() + + model_response = await client.get(f"/models/{model}") + pricing = model_response.get("pricing", {}) + + input_cost_per_1k = pricing.get("input", 0) + output_cost_per_1k = pricing.get("output", 0) + + input_cost = (input_tokens / 1000) * input_cost_per_1k + output_cost = (output_tokens or 0) / 1000 * output_cost_per_1k + total_cost = input_cost + output_cost + + logger.info(f"Cost estimate for {model}: ${total_cost:.6f}") + + return { + "success": True, + "model": model, + "input_tokens": input_tokens, + "output_tokens": output_tokens or 0, + "cost_breakdown": { + "input_cost": input_cost, + "output_cost": output_cost, + "total_cost": total_cost, + }, + "pricing": { + "input_cost_per_1k_tokens": input_cost_per_1k, + "output_cost_per_1k_tokens": output_cost_per_1k, + "currency": "USD", + }, + } + + except OpenRouterToolExecutionError: + raise + except Exception as e: + logger.exception(f"Error estimating cost: {e}") + raise OpenRouterToolExecutionError( + f"Failed to estimate cost: {str(e)}", + additional_prompt_content="There was an error estimating the cost. Please check the model ID and try again.", + developer_message=f"Unexpected error: {str(e)}", + ) \ No newline at end of file diff --git a/mcp_servers/outlook/.env.example b/mcp_servers/outlook/.env.example new file mode 100644 index 00000000..9e0bc888 --- /dev/null +++ b/mcp_servers/outlook/.env.example @@ -0,0 +1,2 @@ +AUTH_DATA={"access_token":"your_outlook_access_token_here"} +OUTLOOK_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/outlook/.python-version b/mcp_servers/outlook/.python-version new file mode 100644 index 00000000..24ee5b1b --- /dev/null +++ b/mcp_servers/outlook/.python-version @@ -0,0 +1 @@ +3.13 diff --git a/mcp_servers/outlook/Dockerfile b/mcp_servers/outlook/Dockerfile new file mode 100644 index 00000000..424d30df --- /dev/null +++ b/mcp_servers/outlook/Dockerfile @@ -0,0 +1,27 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies (gcc often needed for some Python deps) +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Upgrade pip to avoid annoying build errors +RUN pip install --no-cache-dir --upgrade pip + +# Copy requirements first to leverage Docker cache +COPY mcp_servers/outlook/requirements.txt . + +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Copy your server and tools +COPY mcp_servers/outlook/server.py . +COPY mcp_servers/outlook/tools/ ./tools/ + +# Expose the port your server listens on +EXPOSE 5000 + +# Start the server with unbuffered output (logs show immediately) +CMD ["python", "-u", "server.py"] diff --git a/mcp_servers/outlook/README.md b/mcp_servers/outlook/README.md new file mode 100644 index 00000000..e6926ca7 --- /dev/null +++ b/mcp_servers/outlook/README.md @@ -0,0 +1,67 @@ +# Outlook Mail Tools API + +This module provides a comprehensive interface for interacting with Microsoft Outlook's mail features through the Microsoft Graph API. It supports attachments, folder management, message rules, search folders, and core message operations. +Docs - https://learn.microsoft.com/en-us/graph/api/resources/mail-api-overview?view=graph-rest-1.0 + + +## Scope + +We use these Microsoft Graph scopes: + +- `Mail.Read` – read user mail +- `Mail.ReadWrite` – read and write user mail +- `MailboxSettings.Read` – read mailbox settings +- `MailboxSettings.ReadWrite` – read and write mailbox settings +- `Mail.Send` – send mail as the signed‑in user + +## Tool Categories + +--- + +### šŸ“ Folder Management + +| Tool Name | Description | Required Parameters | +| ---------------------------------------- | ------------------- | --------------------------- | +| `outlookMail_create_mail_folder` | Create a new folder | `display_name` | +| `outlookMail_list_folders` | List all folders | - | +| `outlookMail_get_mail_folder` | Get folder details | `folder_id` | +| `outlookMail_update_folder_display_name` | Rename folder | `folder_id`, `display_name` | +| `outlookMail_delete_folder` | Delete folder | `folder_id` | + +--- + +### āœ‰ļø Message Operations + +| Tool Name | Description | Required Parameters | +| --------------------------------------- | ---------------------------------- | --------------------------------------------- | +| `outlookMail_read_message` | Read email content | `message_id` | +| `outlookMail_send_draft` | Send draft message | `message_id` | +| `outlookMail_create_draft` | Create new draft | `subject`, `body_content`, `to_recipients` | +| `outlookMail_create_reply_draft` | Create reply draft | `message_id`, `comment` | +| `outlookMail_create_reply_all_draft` | Reply to all draft | `message_id`, `comment` | +| `outlookMail_create_forward_draft` | Create forward draft | `message_id`, `to_recipients`, `comment` | +| `outlookMail_update_draft` | Update draft | `message_id`, `subject`, `body_content`, etc. | +| `outlookMail_delete_draft` | Delete draft | `message_id` | +| `outlookMail_list_messages` | List messages in inbox | - | +| `outlookMail_list_messages_from_folder` | List messages from specific folder | `folder_id` | +| `outlookMail_move_message` | Move message to another folder | `message_id`, `destination_folder_id` | + +--- + +## āš™ļø Key Features + +- **Full Draft Control**: Create, update, reply, forward, and delete drafts with precision +- **Folder Management**: Create, rename, delete, and list folders effortlessly +- **Attachment Handling**: List attachments and fetch detailed info, including large file support +- **Targeted Message Actions**: Read, list, send, and move messages — by inbox or custom folders +- **Search Folder Support**: Create and manage custom mail search folders +- **Inbox Cleanup**: Move emails across folders to organize and declutter + +## Usage Requirements +- Microsoft Graph API access +- Proper authentication permissions +- Python 3.8+ environment + +For detailed parameter specifications and response formats, refer to individual tool schemas in the source code. All tools follow Microsoft Graph API conventions and data models. + +> **Note**: Most operations require `Mail.ReadWrite` permissions. Admin operations require delegated permissions. \ No newline at end of file diff --git a/mcp_servers/outlook/main.py b/mcp_servers/outlook/main.py new file mode 100644 index 00000000..8b505ba8 --- /dev/null +++ b/mcp_servers/outlook/main.py @@ -0,0 +1,6 @@ +def main(): + print("Hello from outook-mail!") + + +if __name__ == "__main__": + main() diff --git a/mcp_servers/outlook/pyproject.toml b/mcp_servers/outlook/pyproject.toml new file mode 100644 index 00000000..d7a0323f --- /dev/null +++ b/mcp_servers/outlook/pyproject.toml @@ -0,0 +1,13 @@ +[project] +name = "outook-mail" +version = "0.1.0" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.13" +dependencies = [ + "click>=8.3.0", + "httpx>=0.28.1", + "mcp>=1.15.0", + "python-dotenv>=1.1.1", + "starlette>=0.48.0", +] diff --git a/mcp_servers/outlook/requirements.txt b/mcp_servers/outlook/requirements.txt new file mode 100644 index 00000000..d8ef513b --- /dev/null +++ b/mcp_servers/outlook/requirements.txt @@ -0,0 +1,5 @@ +mcp==1.12.3 +httpx +click +starlette +python-dotenv \ No newline at end of file diff --git a/mcp_servers/outlook/server.py b/mcp_servers/outlook/server.py new file mode 100644 index 00000000..af65b3d6 --- /dev/null +++ b/mcp_servers/outlook/server.py @@ -0,0 +1,934 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import List +from contextvars import ContextVar + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + auth_token_context, + + # mailFolder + outlookMail_delete_folder, + outlookMail_create_mail_folder, + outlookMail_list_folders, + outlookMail_get_mail_folder_details, + outlookMail_update_folder_display_name, + + # messages + outlookMail_read_message, + outlookMail_send_draft, + outlookMail_create_reply_all_draft, + outlookMail_list_messages, + outlookMail_create_draft, + outlookMail_create_reply_draft, + outlookMail_delete_draft, + outlookMail_update_draft, + outlookMail_create_forward_draft, + outlookMail_list_messages_from_folder, + outlookMail_move_message +) + + + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +OUTLOOK_MCP_SERVER_PORT = int(os.getenv("OUTLOOK_MCP_SERVER_PORT", "5000")) + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + +@click.command() +@click.option("--port", default=OUTLOOK_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) + +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("outlookMail-mcp-server") +#------------------------------------------------------------------- + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # File Operations + # mailfolder.py---------------------------------------------- + types.Tool( + name="outlookMail_delete_folder", + description="Delete an Outlook mail folder by ID.", + inputSchema={ + "type": "object", + "properties": { + "folder_id": {"type": "string", "description": "The ID of the folder to delete"} + }, + "required": ["folder_id"] + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_FOLDER"}) + ), + types.Tool( + name="outlookMail_create_mail_folder", + description="Create a new mail folder in the signed-in user's mailbox.", + inputSchema={ + "type": "object", + "properties": { + "display_name": {"type": "string", "description": "The name of the new folder"}, + "is_hidden": {"type": "boolean", "description": "Whether the folder is hidden (default False)"} + }, + "required": ["display_name"] + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_FOLDER"}) + ), + types.Tool( + name="outlookMail_list_folders", + description="List mail folders in the signed-in user's mailbox.", + inputSchema={ + "type": "object", + "properties": { + "include_hidden": {"type": "boolean", + "description": "Whether to include hidden folders (default True)"} + } + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_FOLDER", "readOnlyHint": True}) + ), + + types.Tool( + name="outlookMail_get_mail_folder_details", + description="Get details of a specific mail folder by its ID.", + inputSchema={ + "type": "object", + "properties": { + "folder_id": {"type": "string", "description": "Unique ID of the mail folder"} + }, + "required": ["folder_id"] + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_FOLDER", "readOnlyHint": True}) + ), + + types.Tool( + name="outlookMail_update_folder_display_name", + description="Update the display name of an Outlook mail folder.", + inputSchema={ + "type": "object", + "properties": { + "folder_id": {"type": "string", "description": "ID of the mail folder to update"}, + "display_name": {"type": "string", "description": "New display name"} + }, + "required": ["folder_id", "display_name"] + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_FOLDER"}) + ), + + #messages.py----------------------------------------------------------- + types.Tool( + name="outlookMail_read_message", + description="Get a specific Outlook mail message by its ID using Microsoft Graph API.", + inputSchema={ + "type": "object", + "properties": { + "message_id": { + "type": "string", + "description": "The ID of the message to retrieve" + } + }, + "required": ["message_id"], + "additionalProperties": False + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_MESSAGE", "readOnlyHint": True}) + ), + types.Tool( + name="outlookMail_list_messages", + description="Retrieve a list of Outlook mail messages from the signed-in user's mailbox", + inputSchema={ + "type": "object", + "properties": { + "top": { + "type": "integer", + "description": "The maximum number of messages to return", + "default": 10, + "minimum": 1, + "maximum": 1000 + }, + "filter_query": { + "type": "string", + "description": "OData $filter expression to filter messages", + "examples": [ + "isRead eq false", + "importance eq 'high'", + "from/emailAddress/address eq 'example@example.com'", + "subject eq 'Welcome'", + "receivedDateTime ge 2025-07-01T00:00:00Z", + "hasAttachments eq true", + "isRead eq false and importance eq 'high'" + ] + }, + "orderby": { + "type": "string", + "description": "OData $orderby expression to sort results", + "examples": [ + "receivedDateTime desc", + "subject asc" + ] + }, + "select": { + "type": "string", + "description": "Comma-separated list of fields to include in response", + "examples": [ + "subject,from,receivedDateTime", + "id,subject,bodyPreview,isRead" + ] + } + }, + "additionalProperties": False + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_MESSAGE", "readOnlyHint": True}) + ), + types.Tool( + name="outlookMail_list_messages_from_folder", + description="Retrieve a list of Outlook mail messages from a specific folder in the signed-in user's mailbox", + inputSchema={ + "type": "object", + "properties": { + "folder_id": { + "type": "string", + "description": "The unique ID of the Outlook mail folder to retrieve messages from" + }, + "top": { + "type": "integer", + "description": "The maximum number of messages to return", + "default": 10, + "minimum": 1, + "maximum": 1000 + }, + "filter_query": { + "type": "string", + "description": "OData $filter expression to filter messages", + "examples": [ + "isRead eq false", + "importance eq 'high'", + "from/emailAddress/address eq 'example@example.com'", + "subject eq 'Welcome'", + "receivedDateTime ge 2025-07-01T00:00:00Z", + "hasAttachments eq true", + "isRead eq false and importance eq 'high'" + ] + }, + "orderby": { + "type": "string", + "description": "OData $orderby expression to sort results", + "examples": [ + "receivedDateTime desc", + "subject asc" + ] + }, + "select": { + "type": "string", + "description": "Comma-separated list of fields to include in response", + "examples": [ + "subject,from,receivedDateTime", + "id,subject,bodyPreview,isRead" + ] + } + }, + "required": ["folder_id"], + "additionalProperties": False + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_MESSAGE", "readOnlyHint": True}) + ), + types.Tool( + name="outlookMail_update_draft", + description="Updates an existing Outlook draft message using Microsoft Graph API (PATCH method)", + inputSchema={ + "type": "object", + "properties": { + "message_id": { + "type": "string", + "description": "ID of the draft message to update" + }, + "subject": { + "type": "string", + "description": "Message subject (only updatable in draft state)" + }, + "body_content": { + "type": "string", + "description": "HTML content of the message body (only updatable in draft state)" + }, + "to_recipients": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "Recipient email addresses for 'To' (only updatable in draft state)" + }, + "cc_recipients": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "Recipient email addresses for 'Cc' (only updatable in draft state)" + }, + "bcc_recipients": { + "type": "array", + "items": {"type": "string", "format": "email"}, + "description": "Recipient email addresses for 'Bcc' (only updatable in draft state)" + } + }, + "required": ["message_id"], + "additionalProperties": False + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_MESSAGE"}) + ), + + types.Tool( + name="outlookMail_delete_draft", + description="Delete an existing Outlook draft message by message ID", + inputSchema={ + "type": "object", + "properties": { + "message_id": { + "type": "string", + "description": "The ID of the draft message to delete" + } + }, + "required": ["message_id"], + "additionalProperties": False + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_MESSAGE"}) + ), + types.Tool( + name="outlookMail_create_forward_draft", + description="Create a draft forward message for an existing Outlook message", + inputSchema={ + "type": "object", + "properties": { + "message_id": { + "type": "string", + "description": "ID of the original message to forward" + }, + "comment": { + "type": "string", + "description": "Comment to include in the forwarded message" + }, + "to_recipients": { + "type": "array", + "items": { + "type": "string", + "format": "email" + }, + "description": "List of recipient email addresses" + } + }, + "required": ["message_id", "comment", "to_recipients"], + "additionalProperties": False + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_MESSAGE"}) + ), + types.Tool( + name="outlookMail_create_reply_draft", + description="Create a draft reply message to an existing Outlook message", + inputSchema={ + "type": "object", + "properties": { + "message_id": { + "type": "string", + "description": "ID of the original message to reply to" + }, + "comment": { + "type": "string", + "description": "Comment to include in the reply" + } + }, + "required": ["message_id", "comment"], + "additionalProperties": False + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_MESSAGE"}) + ), + types.Tool( + name="outlookMail_create_reply_all_draft", + description="Create a reply-all draft to an existing Outlook message", + inputSchema={ + "type": "object", + "properties": { + "message_id": { + "type": "string", + "description": "The ID of the message to reply to" + }, + "comment": { + "type": "string", + "description": "Text to include in the reply body", + "default": "" + } + }, + "required": ["message_id"], + "additionalProperties": False + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_MESSAGE"}) + ), + types.Tool( + name="outlookMail_send_draft", + description="Send an existing draft Outlook mail message", + inputSchema={ + "type": "object", + "properties": { + "message_id": {"type": "string", "description": "The ID of the draft message to send"} + }, + "required": ["message_id"], + "additionalProperties": False + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_MESSAGE"}) + ), + types.Tool( + name="outlookMail_create_draft", + description="Create a draft Outlook mail message using Microsoft Graph API (POST method)", + inputSchema={ + "type": "object", + "properties": { + "subject": { + "type": "string", + "description": "Subject of the draft message" + }, + "body_content": { + "type": "string", + "description": "HTML content of the message body" + }, + "to_recipients": { + "type": "array", + "items": { + "type": "string", + "format": "email" + }, + "description": "List of email addresses for the 'To' field" + }, + "cc_recipients": { + "type": "array", + "items": { + "type": "string", + "format": "email" + }, + "description": "List of email addresses for 'Cc'" + }, + "bcc_recipients": { + "type": "array", + "items": { + "type": "string", + "format": "email" + }, + "description": "List of email addresses for 'Bcc'" + } + }, + "required": ["subject", "body_content", "to_recipients"], + "additionalProperties": False + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_MESSAGE"}) + ), + types.Tool( + name="outlookMail_move_message", + description="Move an Outlook mail message to another folder by folder ID or well-known name like 'deleteditems'.", + inputSchema={ + "type": "object", + "properties": { + "message_id": { + "type": "string", + "description": "ID of the message to move" + }, + "destination_folder_id": { + "type": "string", + "description": "ID of the destination folder (e.g. 'deleteditems' or a custom folder ID)" + } + }, + "required": ["message_id", "destination_folder_id"], + "additionalProperties": False + }, + annotations=types.ToolAnnotations(**{"category": "OUTLOOK_MESSAGE"}) + ) + + ] + + @app.call_tool() + async def call_tool( + name: str, + arguments: dict + ) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + # Mail Folder Operations + if name == "outlookMail_delete_folder": + try: + result = await outlookMail_delete_folder( + folder_id=arguments["folder_id"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error deleting folder: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "outlookMail_create_mail_folder": + try: + result = await outlookMail_create_mail_folder( + display_name=arguments["display_name"], + is_hidden=arguments.get("is_hidden", False) + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error creating mail folder: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "outlookMail_list_folders": + try: + result = await outlookMail_list_folders( + include_hidden=arguments.get("include_hidden", True) + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error listing folders: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "outlookMail_get_mail_folder_details": + try: + result = await outlookMail_get_mail_folder_details( + folder_id=arguments["folder_id"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error getting folder: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "outlookMail_update_folder_display_name": + try: + result = await outlookMail_update_folder_display_name( + folder_id=arguments["folder_id"], + display_name=arguments["display_name"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error updating folder display name: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Message Operations + elif name == "outlookMail_read_message": + try: + result = await outlookMail_read_message( + message_id=arguments["message_id"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}" + ) + ] + + elif name == "outlookMail_list_messages": + try: + result = await outlookMail_list_messages( + top=arguments.get("top", 10), + filter_query=arguments.get("filter_query"), + orderby=arguments.get("orderby"), + select=arguments.get("select") + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error listing messages: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "outlookMail_list_messages_from_folder": + try: + result = await outlookMail_list_messages_from_folder( + folder_id=arguments["folder_id"], + top=arguments.get("top", 10), + filter_query=arguments.get("filter_query"), + orderby=arguments.get("orderby"), + select=arguments.get("select") + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error listing messages from folder: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "outlookMail_update_draft": + try: + result = await outlookMail_update_draft( + message_id=arguments["message_id"], + subject=arguments.get("subject"), + body_content=arguments.get("body_content"), + to_recipients=arguments.get("to_recipients"), + cc_recipients=arguments.get("cc_recipients"), + bcc_recipients=arguments.get("bcc_recipients"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error updating draft: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + # Message Operations + elif name == "outlookMail_delete_draft": + try: + result = await outlookMail_delete_draft( + message_id=arguments["message_id"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error deleting draft: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "outlookMail_create_forward_draft": + try: + result = await outlookMail_create_forward_draft( + message_id=arguments["message_id"], + comment=arguments["comment"], + to_recipients=arguments["to_recipients"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error creating forward draft: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "outlookMail_create_reply_draft": + try: + result = await outlookMail_create_reply_draft( + message_id=arguments["message_id"], + comment=arguments["comment"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error creating reply draft: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "outlookMail_create_reply_all_draft": + try: + result = await outlookMail_create_reply_all_draft( + message_id=arguments["message_id"], + comment=arguments.get("comment", "") + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error creating reply-all draft: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "outlookMail_send_draft": + try: + result = await outlookMail_send_draft( + message_id=arguments["message_id"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error sending draft: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "outlookMail_create_draft": + try: + result = await outlookMail_create_draft( + subject=arguments["subject"], + body_content=arguments["body_content"], + to_recipients=arguments["to_recipients"], + cc_recipients=arguments.get("cc_recipients"), + bcc_recipients=arguments.get("bcc_recipients"), + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error creating draft message: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + elif name == "outlookMail_move_message": + try: + result = await outlookMail_move_message( + message_id=arguments["message_id"], + destination_folder_id=arguments["destination_folder_id"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error moving message: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + #------------------------------------------------------------------------- + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract access token using standard method + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract access token using standard method + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/outlook/tools/__init__.py b/mcp_servers/outlook/tools/__init__.py new file mode 100644 index 00000000..29eddea1 --- /dev/null +++ b/mcp_servers/outlook/tools/__init__.py @@ -0,0 +1,52 @@ +from .base import ( +auth_token_context +) + +from .mailFolder import ( +outlookMail_delete_folder, +outlookMail_create_mail_folder, +outlookMail_list_folders, +outlookMail_get_mail_folder_details, +outlookMail_update_folder_display_name, +) + + + +from .messages import ( +outlookMail_create_reply_all_draft, +outlookMail_list_messages, +outlookMail_read_message, +outlookMail_create_draft, +outlookMail_list_messages_from_folder, +outlookMail_create_reply_draft, +outlookMail_delete_draft, +outlookMail_update_draft, +outlookMail_create_forward_draft, +outlookMail_send_draft, +outlookMail_move_message +) + +__all__ = [ + #base.py + "auth_token_context", + + #mailfolder.py + "outlookMail_delete_folder", + "outlookMail_create_mail_folder", + "outlookMail_list_folders", + "outlookMail_get_mail_folder_details", + "outlookMail_update_folder_display_name", + + #messages.py + "outlookMail_send_draft", + "outlookMail_read_message", + "outlookMail_create_reply_all_draft", + "outlookMail_list_messages", + "outlookMail_create_draft", + "outlookMail_create_reply_draft", + "outlookMail_delete_draft", + "outlookMail_update_draft", + "outlookMail_create_forward_draft", + "outlookMail_list_messages_from_folder", + "outlookMail_move_message" +] \ No newline at end of file diff --git a/mcp_servers/outlook/tools/base.py b/mcp_servers/outlook/tools/base.py new file mode 100644 index 00000000..e26f55cf --- /dev/null +++ b/mcp_servers/outlook/tools/base.py @@ -0,0 +1,57 @@ +import logging +import os +import json +from contextvars import ContextVar +from typing import Optional +from dotenv import load_dotenv + +# Load environment variables from .env file +load_dotenv() + +logger = logging.getLogger(__name__) + +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + try: + token = auth_token_context.get() + if token: + return token + except LookupError: + pass + + # Fallback to AUTH_DATA environment variable + auth_data = os.getenv("AUTH_DATA") + if auth_data: + try: + auth_json = json.loads(auth_data) + access_token = auth_json.get('access_token', '') + if access_token: + return access_token + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse AUTH_DATA JSON: {e}") + + # Legacy fallback to OUTLOOK_ACCESS_TOKEN + token = os.getenv("OUTLOOK_ACCESS_TOKEN") + if token: + return token + + raise RuntimeError("Authentication token not found in context or environment") + +def get_outlookMail_client() -> Optional[dict]: + """ + Return a simple client dict with base_url and headers. + """ + try: + auth_token = get_auth_token() + client = { + "base_url": "https://graph.microsoft.com/v1.0", + "headers": {'Authorization': f"Bearer {auth_token}"} + } + return client + except RuntimeError as e: + logger.warning(f"Failed to get auth token: {e}") + return None + +if __name__ == '__main__': + print(get_outlookMail_client()) \ No newline at end of file diff --git a/mcp_servers/outlook/tools/mailFolder.py b/mcp_servers/outlook/tools/mailFolder.py new file mode 100644 index 00000000..65a225bf --- /dev/null +++ b/mcp_servers/outlook/tools/mailFolder.py @@ -0,0 +1,202 @@ +import httpx +import logging +from .base import get_outlookMail_client + +# Configure logging +logger = logging.getLogger(__name__) + +async def outlookMail_list_folders(include_hidden: bool = True) -> dict: + """ + List mail folders in the signed-in user's mailbox. + + Args: + include_hidden (bool, optional): Whether to include hidden folders. Defaults to True. + + Returns: + dict: JSON response with list of folders or an error. + """ + client = get_outlookMail_client() + if not client: + logging.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/mailFolders" + params = {} + if include_hidden: + params["includeHiddenFolders"] = "true" + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.get(url, headers=client['headers'], params=params) + response.raise_for_status() + return response.json() + except Exception as e: + logging.error(f"Could not get mail folders from {url}: {e}") + return {"error": f"Could not get mail folders from {url}: {e}"} + +async def outlookMail_get_messages_from_folder( + folder_id: str, + top: int = 10, + filter_query: str = None, + orderby: str = None, + select: str = None +)-> dict: + """ + Retrieve messages from a specific Outlook mail folder. + + Args: + folder_id (str): The unique ID of the mail folder. + top (int, optional): Max number of messages to return (default: 10). + filter_query (str, optional): OData $filter expression (e.g., "contains(subject, 'weekly digest')"). + orderby (str, optional): OData $orderby expression (e.g., "receivedDateTime desc"). + select (str, optional): Comma-separated list of properties to include. + + Returns: + dict: JSON response with list of messages, or error info. + """ + client = get_outlookMail_client() + if not client: + logging.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/mailFolders/{folder_id}/messages" + params = {'$top': top} + + if filter_query: + params['$filter'] = filter_query + if orderby: + params['$orderby'] = orderby + if select: + params['$select'] = select + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.get(url, headers=client['headers'], params=params) + response.raise_for_status() + return response.json() + except Exception as e: + logging.error(f"Could not get messages from {url}: {e}") + return {"error": f"Could not get messages from {url}"} + +async def outlookMail_get_mail_folder_details(folder_id: str) -> dict: + """ + Get details of a specific mail folder by its ID. + + Args: + folder_id (str): The unique ID of the mail folder. + + Returns: + dict: JSON response from Microsoft Graph with folder details, + or error info if request fails. + """ + client = get_outlookMail_client() + if not client: + logging.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/mailFolders/{folder_id}" + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.get(url, headers=client['headers']) + response.raise_for_status() + return response.json() + except Exception as e: + logging.error(f"Could not get mail folder at {url}: {e}") + return {"error": f"Could not get mail folder at {url}"} + +async def outlookMail_create_mail_folder( + display_name: str, + is_hidden: bool = False +) -> dict: + """ + Create a new mail folder in the signed-in user's mailbox. + + Args: + display_name (str): The name of the new folder. + is_hidden (bool, optional): Whether the folder is hidden. Defaults to False. + + Returns: + dict: JSON response from Microsoft Graph with the created folder info, + or error info if request fails. + """ + client = get_outlookMail_client() + if not client: + logging.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/mailFolders" + payload = { + "displayName": display_name, + "isHidden": is_hidden + } + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.post(url, headers=client['headers'], json=payload) + response.raise_for_status() + return response.json() + except Exception as e: + logging.error(f"Could not create mail folder at {url}: {e}") + return {"error": f"Could not create mail folder at {url}"} + +async def outlookMail_update_folder_display_name( + folder_id: str, + display_name: str +) -> dict: + """ + Update the display name of an Outlook mail folder. + + Args: + folder_id (str): ID of the mail folder to update. + display_name (str): New display name. + + Returns: + dict: JSON response on success, or error details. + """ + client = get_outlookMail_client() + if not client: + logging.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/mailFolders/{folder_id}" + payload = {"displayName": display_name} + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.patch(url, headers=client['headers'], json=payload) + response.raise_for_status() + return response.json() + except Exception as e: + logging.error(f"Failed to update folder at {url}: {e}") + return {"error": f"Failed to update folder at {url}"} + +async def outlookMail_delete_folder(folder_id: str) -> dict: + """ + Delete an Outlook mail folder by ID. + + Args: + folder_id (str): The ID of the folder to delete. + + Returns: + dict: Result message or error details. + """ + client = get_outlookMail_client() + if not client: + logging.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/mailFolders/{folder_id}" + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.delete(url, headers=client['headers']) + if response.status_code == 204: + logging.info(f"Deleted folder with ID {folder_id}") + return {"message": f"Folder {folder_id} deleted successfully"} + else: + logging.error(f"Failed to delete folder {folder_id}: {response.text}") + return {"error": f"Unexpected response: {response.status_code}", "details": response.text} + except Exception as e: + logging.error(f"Could not delete folder at {url}: {e}") + return {"error": f"Could not delete folder at {url}"} diff --git a/mcp_servers/outlook/tools/messages.py b/mcp_servers/outlook/tools/messages.py new file mode 100644 index 00000000..3696ddf6 --- /dev/null +++ b/mcp_servers/outlook/tools/messages.py @@ -0,0 +1,549 @@ +import httpx +import logging +from .base import get_outlookMail_client + +# Configure logging +logger = logging.getLogger(__name__) + +async def outlookMail_list_messages( + top: int = 10, + filter_query: str = None, + orderby: str = None, + select: str = None +) -> dict: + """ + Retrieve a list of Outlook mail messages from the signed-in user's mailbox. + + Args: + top (int, optional): + The maximum number of messages to return. Defaults to 10. + filter_query (str, optional): + An OData $filter expression to filter messages by specific criteria. + Example filters you can use: + - "isRead eq false" → Only unread emails + - "importance eq 'high'" → Emails marked as high importance + - "from/emailAddress/address eq 'example@example.com'" → Emails sent by a specific address + - "subject eq 'Welcome'" → Emails with a specific subject + - "receivedDateTime ge 2025-07-01T00:00:00Z" → Emails received after a date + - "hasAttachments eq true" → Emails that include attachments + - Combine filters: "isRead eq false and importance eq 'high'" + orderby (str, optional): + An OData $orderby expression to sort results. + Example: "receivedDateTime desc" (newest first) + select (str, optional): + Comma-separated list of fields to include in the response. + Example: "subject,from,receivedDateTime" + + Returns: + dict: JSON response from the Microsoft Graph API containing the list of messages + or an error message if the request fails. + + Notes: + - Requires an authenticated Outlook client. + - This function internally builds the API request to: + GET https://graph.microsoft.com/v1.0/me/messages + with the provided query parameters. + """ + + client = get_outlookMail_client() + if not client: + logger.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + logger.info("Retrieving Outlook mail messages") + + url = f"{client['base_url']}/me/messages" + params = {'$top': top} + + if filter_query: + params['$filter'] = filter_query + if orderby: + params['$orderby'] = orderby + if select: + params['$select'] = select + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.get(url, headers=client['headers'], params=params) + return response.json() + except Exception as e: + logger.error(f"Could not get Outlook messages from {url}: {e}") + return {"error": f"Could not get Outlook messages from {url}"} + + +async def outlookMail_list_messages_from_folder( + folder_id: str, + top: int = 10, + filter_query: str = None, + orderby: str = None, + select: str = None +) -> dict: + """ + Retrieve a list of Outlook mail messages from a specific folder in the signed-in user's mailbox. + + Args: + folder_id (str): + The unique ID of the Outlook mail folder to retrieve messages from. + Example: 'AQMkADAwATNiZmYAZS05YmUxLTk3NDYtMDACLTAwCgAuAAAD...' + top (int, optional): + The maximum number of messages to return. Defaults to 10. + filter_query (str, optional): + An OData $filter expression to filter messages by specific criteria. + Example filters you can use: + - "isRead eq false" → Only unread emails + - "importance eq 'high'" → Emails marked as high importance + - "from/emailAddress/address eq 'example@example.com'" → Emails sent by a specific address + - "subject eq 'Welcome'" → Emails with a specific subject + - "receivedDateTime ge 2025-07-01T00:00:00Z" → Emails received after a date + - "hasAttachments eq true" → Emails that include attachments + - Combine filters: "isRead eq false and importance eq 'high'" + orderby (str, optional): + An OData $orderby expression to sort results. + Example: "receivedDateTime desc" (newest first) + select (str, optional): + Comma-separated list of fields to include in the response. + Example: "subject,from,receivedDateTime" + + Returns: + dict: JSON response from the Microsoft Graph API containing the list of messages, + or an error message if the request fails. + + Notes: + - Requires an authenticated Outlook client. + - This function sends a GET request to: + https://graph.microsoft.com/v1.0/me/mailFolders/{folder_id}/messages + with the provided query parameters. + """ + + client = get_outlookMail_client() + if not client: + logger.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/mailFolders/{folder_id}/messages" + params = {'$top': top} + + if filter_query: + params['$filter'] = filter_query + if orderby: + params['$orderby'] = orderby + if select: + params['$select'] = select + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.get(url, headers=client['headers'], params=params) + return response.json() + except Exception as e: + logger.error(f"Could not get Outlook messages from {url}: {e}") + return {"error": f"Could not get Outlook messages from {url}"} + +async def outlookMail_read_message(message_id: str) -> dict: + """ + Get a specific Outlook mail message by its ID using Microsoft Graph API. + + Parameters: + ----------- + message_id (str): The ID of the message to retrieve + + Returns: + -------- + dict: The message object, or error on failure + """ + url = f"https://graph.microsoft.com/v1.0/me/messages/%7Bmessage_id%7D" + client = get_outlookMail_client() + if not client: + logger.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + async with httpx.AsyncClient() as httpx_client: + res = await httpx_client.get(url, headers=client['headers']) + return res.json() + + +async def outlookMail_create_draft( + subject: str, + body_content: str, + to_recipients: list, + cc_recipients: list = None, + bcc_recipients: list = None, +) -> dict: + """ + Create a draft Outlook mail message using Microsoft Graph API (POST method) + + Required parameters: + -------------------- + subject (str): Subject of the draft message + body_content (str): HTML content of the message body + to_recipients (list): List of email addresses for the "To" field + + Optional parameters: + -------------------- + cc_recipients (list): List of email addresses for "Cc" + bcc_recipients (list): List of email addresses for "Bcc" + + Returns: + -------- + dict: Created draft object on success, or an error dictionary on failure + + Notes: + ------ + - The draft is saved to the user's Drafts folder. + - Recipient lists accept simple email strings; function builds correct schema. + """ + client = get_outlookMail_client() + if not client: + logger.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/messages" + payload = { + "subject": subject, + "body": { + "contentType": "HTML", + "content": body_content + } + } + + + # Recipient fields to add dynamically + recipient_fields = { + "toRecipients": to_recipients, + "ccRecipients": cc_recipients, + "bccRecipients": bcc_recipients + } + + for key, emails in recipient_fields.items(): + if emails: + payload[key] = [{"emailAddress": {"address": email}} for email in emails] + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.post(url, headers=client['headers'], json=payload) + response.raise_for_status() + return response.json() + except Exception as e: + logger.error(f"Could not create Outlook draft message at {url}: {e}") + return {"error": f"Could not create Outlook draft message at {url}"} + +async def outlookMail_update_draft( + message_id: str, + subject: str = None, + body_content: str = None, + to_recipients: list = None, + cc_recipients: list = None, + bcc_recipients: list = None, +) -> dict: + """ + Updates an existing Outlook draft message using Microsoft Graph API (PATCH method) + + Required parameter: + message_id (str): ID of the draft message to update (e.g., "AAMkAGM2...") + + Draft-Specific Parameters (updatable only in draft state): + subject (str): Message subject + body_content (str): HTML content of the message body + internet_message_id (str): RFC2822 message ID + reply_to (list): Email addresses for reply-to + toRecipients/ccRecipients/bccRecipients (list): Recipient email addresses + + Returns: + dict: Updated message object on success, error dictionary on failure + + Parameter Structures: + -------------------- + 1. Recipient Structure (for from_sender/sender): + { + "emailAddress": { + "address": "user@domain.com", # REQUIRED email + "name": "Display Name" # Optional display name + } + } + + 2. followupFlag Structure (for flag parameter): + { + "completedDateTime": { # Completion date/time + "dateTime": "yyyy-MM-ddThh:mm:ss", + "timeZone": "TimezoneName" + }, + "dueDateTime": { # Due date/time (requires startDateTime) + "dateTime": "yyyy-MM-ddThh:mm:ss", + "timeZone": "TimezoneName" + }, + "flagStatus": "flagged", # "notFlagged", "flagged", "complete" + "startDateTime": { # Start date/time + "dateTime": "yyyy-MM-ddThh:mm:ss", + "timeZone": "TimezoneName" + } + } + + 3. Body Structure (handled automatically from body_content): + { + "contentType": "HTML", # Fixed as HTML + "content": "..." + } + """ + client = get_outlookMail_client() + if not client: + logger.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/messages/{message_id}" + payload = {} + + # Add plain fields + fields = { + "subject": subject, + } + + for key, value in fields.items(): + if value is not None: + payload[key] = value + + # Add body if provided + if body_content: + payload["body"] = { + "contentType": "HTML", + "content": body_content + } + + # Add recipients + recipient_fields = { + "toRecipients": to_recipients, + "ccRecipients": cc_recipients, + "bccRecipients": bcc_recipients + } + + for key, emails in recipient_fields.items(): + if emails: + payload[key] = [{"emailAddress": {"address": email}} for email in emails] + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.patch(url, headers=client['headers'], json=payload) + response.raise_for_status() + return response.json() + except Exception as e: + logger.error(f"Could not update Outlook draft message at {url}: {e}") + return {"error": f"Could not update Outlook draft message at {url}"} + + +async def outlookMail_delete_draft(message_id: str) -> dict: + """ + Delete an existing Outlook draft message by message ID. + + Args: + message_id (str): The ID of the draft message to Delete. + + Returns: + dict: JSON response from Microsoft Graph API with updated draft details, + or an error message if the request fails. + """ + client = get_outlookMail_client() + if not client: + logger.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/messages/{message_id}" + + try: + logger.info(f"Deleting draft Outlook mail message at {url}") + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.delete(url, headers=client['headers']) + if response.status_code == 204: + return {"Success":"Deleted"} + else: + logger.warning(f"Unexpected status code: {response.status_code}") + # try to parse error if there is one + try: + error_response = response.json() + logger.error(f"Delete failed with response: {error_response}") + return error_response + except Exception as parse_error: + logger.error(f"Could not parse error response: {parse_error}") + return {"error": f"Unexpected response: {response.status_code}"} + except Exception as e: + logger.error(f"Could not delete Outlook draft message at {url}: {e}") + return {"error": f"Could not delete Outlook draft message at {url}"} + + +async def outlookMail_create_forward_draft( + message_id: str, + comment: str, + to_recipients: list +) -> dict: + """ + Create a draft forward message for an existing Outlook message. + + Args: + message_id (str): ID of the original message to forward. + comment (str): Comment to include in the forwarded message. + to_recipients (list): List of recipient email addresses as strings. + + Returns: + dict: JSON response from Microsoft Graph API with the created draft forward's details, + or an error message if the request fails. + """ + client = get_outlookMail_client() # same method you used to get your client and headers + if not client: + logger.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/messages/{message_id}/createForward" + + # Build recipient list in required format + recipients = [{"emailAddress": {"address": email}} for email in to_recipients] + + payload = { + "comment": comment, + "toRecipients": recipients + } + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.post(url, headers=client['headers'], json=payload) + return response.json() + except Exception as e: + logger.error(f"Could not create Outlook forward draft message at {url}: {e}") + return {"error": f"Could not create Outlook forward draft message at {url}"} + +async def outlookMail_create_reply_draft( + message_id: str, + comment: str +) -> dict: + """ + Create a draft reply message to an existing Outlook message. + + Args: + message_id (str): ID of the original message to reply to. + comment (str): Comment to include in the reply. + + + Returns: + dict: JSON response from Microsoft Graph API with the created draft reply's details, + or an error message if the request fails. + """ + client = get_outlookMail_client() + if not client: + logger.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/messages/{message_id}/createReply" + + payload = { + "comment": comment + } + + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.post(url, headers=client['headers'], json=payload) + return response.json() + except Exception as e: + logger.error(f"Could not create Outlook reply draft message at {url}: {e}") + return {"error": f"Could not create Outlook reply draft message at {url}"} + + +async def outlookMail_create_reply_all_draft( + message_id: str, + comment: str = "" +) -> dict: + """ + Create a reply-all draft to an existing Outlook message. + + Args: + message_id (str): The ID of the message you want to reply to. + comment (str, optional): Text to include in the reply body. + + Returns: + dict: JSON response from Microsoft Graph API with the draft details, + or an error message if the request fails. + """ + client = get_outlookMail_client() # reuse your existing token logic + if not client: + logger.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/messages/{message_id}/createReplyAll" + + payload = { + "comment": comment + } + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.post(url, headers=client['headers'], json=payload) + return response.json() + except Exception as e: + logger.error(f"Could not create reply-all draft at {url}: {e}") + return {"error": f"Could not create reply-all draft at {url}"} + +async def outlookMail_send_draft(message_id: str) -> dict: + """ + Send an existing draft Outlook mail message by message ID. + + Args: + message_id (str): The ID of the draft message to send. + + Returns: + dict: Empty response if successful, or error details. + """ + client = get_outlookMail_client() + if not client: + logger.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/messages/{message_id}/send" + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.post(url, headers=client['headers']) + if response.status_code == 202 or response.status_code == 200 or response.status_code == 204: + logger.info("Draft sent successfully") + return {"success": "Draft sent successfully"} + else: + try: + return response.json() + except Exception: + return {"error": f"Unexpected response: {response.status_code}"} + except Exception as e: + logger.error(f"Could not send Outlook draft message at {url}: {e}") + return {"error": f"Could not send Outlook draft message at {url}"} + +async def outlookMail_move_message( + message_id: str, + destination_folder_id: str +) -> dict: + """ + Move an Outlook mail message to another folder. + + Args: + message_id (str): ID of the message to move. + destination_folder_id (str): ID of the target folder. + Example: 'deleteditems' or actual folder ID. + + Returns: + dict: JSON response from Microsoft Graph API with moved message details, + or an error message if it fails. + """ + client = get_outlookMail_client() + if not client: + logger.error("Could not get Outlook client") + return {"error": "Could not get Outlook client"} + + url = f"{client['base_url']}/me/messages/{message_id}/move" + + payload = { + "destinationId": destination_folder_id + } + + try: + async with httpx.AsyncClient() as httpx_client: + response = await httpx_client.post(url, headers=client['headers'], json=payload) + return response.json() + except Exception as e: + logger.error(f"Could not move Outlook mail message at {url}: {e}") + return {"error": f"Could not move Outlook mail message at {url}"} + + +if __name__ == "__main__": + #print(await outlookMail_create_draft('dss','sds','dsds')) + pass \ No newline at end of file diff --git a/mcp_servers/outlook/uv.lock b/mcp_servers/outlook/uv.lock new file mode 100644 index 00000000..24817ddf --- /dev/null +++ b/mcp_servers/outlook/uv.lock @@ -0,0 +1,423 @@ +version = 1 +revision = 3 +requires-python = ">=3.13" + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "sniffio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "click" +version = "8.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + +[[package]] +name = "mcp" +version = "1.15.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0c/9e/e65114795f359f314d7061f4fcb50dfe60026b01b52ad0b986b4631bf8bb/mcp-1.15.0.tar.gz", hash = "sha256:5bda1f4d383cf539d3c035b3505a3de94b20dbd7e4e8b4bd071e14634eeb2d72", size = 469622, upload-time = "2025-09-25T15:39:51.995Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/82/4d0df23d5ff5bb982a59ad597bc7cb9920f2650278ccefb8e0d85c5ce3d4/mcp-1.15.0-py3-none-any.whl", hash = "sha256:314614c8addc67b663d6c3e4054db0a5c3dedc416c24ef8ce954e203fdc2333d", size = 166963, upload-time = "2025-09-25T15:39:50.538Z" }, +] + +[[package]] +name = "outook-mail" +version = "0.1.0" +source = { virtual = "." } +dependencies = [ + { name = "click" }, + { name = "httpx" }, + { name = "mcp" }, + { name = "python-dotenv" }, + { name = "starlette" }, +] + +[package.metadata] +requires-dist = [ + { name = "click", specifier = ">=8.3.0" }, + { name = "httpx", specifier = ">=0.28.1" }, + { name = "mcp", specifier = ">=1.15.0" }, + { name = "python-dotenv", specifier = ">=1.1.1" }, + { name = "starlette", specifier = ">=0.48.0" }, +] + +[[package]] +name = "pydantic" +version = "2.11.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.27.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479, upload-time = "2025-08-27T12:16:36.024Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/77/610aeee8d41e39080c7e14afa5387138e3c9fa9756ab893d09d99e7d8e98/rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b", size = 361741, upload-time = "2025-08-27T12:13:31.039Z" }, + { url = "https://files.pythonhosted.org/packages/3a/fc/c43765f201c6a1c60be2043cbdb664013def52460a4c7adace89d6682bf4/rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf", size = 345574, upload-time = "2025-08-27T12:13:32.902Z" }, + { url = "https://files.pythonhosted.org/packages/20/42/ee2b2ca114294cd9847d0ef9c26d2b0851b2e7e00bf14cc4c0b581df0fc3/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83", size = 385051, upload-time = "2025-08-27T12:13:34.228Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e8/1e430fe311e4799e02e2d1af7c765f024e95e17d651612425b226705f910/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf", size = 398395, upload-time = "2025-08-27T12:13:36.132Z" }, + { url = "https://files.pythonhosted.org/packages/82/95/9dc227d441ff2670651c27a739acb2535ccaf8b351a88d78c088965e5996/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2", size = 524334, upload-time = "2025-08-27T12:13:37.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/01/a670c232f401d9ad461d9a332aa4080cd3cb1d1df18213dbd0d2a6a7ab51/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0", size = 407691, upload-time = "2025-08-27T12:13:38.94Z" }, + { url = "https://files.pythonhosted.org/packages/03/36/0a14aebbaa26fe7fab4780c76f2239e76cc95a0090bdb25e31d95c492fcd/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418", size = 386868, upload-time = "2025-08-27T12:13:40.192Z" }, + { url = "https://files.pythonhosted.org/packages/3b/03/8c897fb8b5347ff6c1cc31239b9611c5bf79d78c984430887a353e1409a1/rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d", size = 405469, upload-time = "2025-08-27T12:13:41.496Z" }, + { url = "https://files.pythonhosted.org/packages/da/07/88c60edc2df74850d496d78a1fdcdc7b54360a7f610a4d50008309d41b94/rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274", size = 422125, upload-time = "2025-08-27T12:13:42.802Z" }, + { url = "https://files.pythonhosted.org/packages/6b/86/5f4c707603e41b05f191a749984f390dabcbc467cf833769b47bf14ba04f/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd", size = 562341, upload-time = "2025-08-27T12:13:44.472Z" }, + { url = "https://files.pythonhosted.org/packages/b2/92/3c0cb2492094e3cd9baf9e49bbb7befeceb584ea0c1a8b5939dca4da12e5/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2", size = 592511, upload-time = "2025-08-27T12:13:45.898Z" }, + { url = "https://files.pythonhosted.org/packages/10/bb/82e64fbb0047c46a168faa28d0d45a7851cd0582f850b966811d30f67ad8/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002", size = 557736, upload-time = "2025-08-27T12:13:47.408Z" }, + { url = "https://files.pythonhosted.org/packages/00/95/3c863973d409210da7fb41958172c6b7dbe7fc34e04d3cc1f10bb85e979f/rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3", size = 221462, upload-time = "2025-08-27T12:13:48.742Z" }, + { url = "https://files.pythonhosted.org/packages/ce/2c/5867b14a81dc217b56d95a9f2a40fdbc56a1ab0181b80132beeecbd4b2d6/rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83", size = 232034, upload-time = "2025-08-27T12:13:50.11Z" }, + { url = "https://files.pythonhosted.org/packages/c7/78/3958f3f018c01923823f1e47f1cc338e398814b92d83cd278364446fac66/rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d", size = 222392, upload-time = "2025-08-27T12:13:52.587Z" }, + { url = "https://files.pythonhosted.org/packages/01/76/1cdf1f91aed5c3a7bf2eba1f1c4e4d6f57832d73003919a20118870ea659/rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228", size = 358355, upload-time = "2025-08-27T12:13:54.012Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6f/bf142541229374287604caf3bb2a4ae17f0a580798fd72d3b009b532db4e/rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92", size = 342138, upload-time = "2025-08-27T12:13:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/1a/77/355b1c041d6be40886c44ff5e798b4e2769e497b790f0f7fd1e78d17e9a8/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2", size = 380247, upload-time = "2025-08-27T12:13:57.683Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a4/d9cef5c3946ea271ce2243c51481971cd6e34f21925af2783dd17b26e815/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723", size = 390699, upload-time = "2025-08-27T12:13:59.137Z" }, + { url = "https://files.pythonhosted.org/packages/3a/06/005106a7b8c6c1a7e91b73169e49870f4af5256119d34a361ae5240a0c1d/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802", size = 521852, upload-time = "2025-08-27T12:14:00.583Z" }, + { url = "https://files.pythonhosted.org/packages/e5/3e/50fb1dac0948e17a02eb05c24510a8fe12d5ce8561c6b7b7d1339ab7ab9c/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f", size = 402582, upload-time = "2025-08-27T12:14:02.034Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b0/f4e224090dc5b0ec15f31a02d746ab24101dd430847c4d99123798661bfc/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2", size = 384126, upload-time = "2025-08-27T12:14:03.437Z" }, + { url = "https://files.pythonhosted.org/packages/54/77/ac339d5f82b6afff1df8f0fe0d2145cc827992cb5f8eeb90fc9f31ef7a63/rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21", size = 399486, upload-time = "2025-08-27T12:14:05.443Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/3e1c255eee6ac358c056a57d6d6869baa00a62fa32eea5ee0632039c50a3/rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef", size = 414832, upload-time = "2025-08-27T12:14:06.902Z" }, + { url = "https://files.pythonhosted.org/packages/3f/db/6d498b844342deb3fa1d030598db93937a9964fcf5cb4da4feb5f17be34b/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081", size = 557249, upload-time = "2025-08-27T12:14:08.37Z" }, + { url = "https://files.pythonhosted.org/packages/60/f3/690dd38e2310b6f68858a331399b4d6dbb9132c3e8ef8b4333b96caf403d/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd", size = 587356, upload-time = "2025-08-27T12:14:10.034Z" }, + { url = "https://files.pythonhosted.org/packages/86/e3/84507781cccd0145f35b1dc32c72675200c5ce8d5b30f813e49424ef68fc/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7", size = 555300, upload-time = "2025-08-27T12:14:11.783Z" }, + { url = "https://files.pythonhosted.org/packages/e5/ee/375469849e6b429b3516206b4580a79e9ef3eb12920ddbd4492b56eaacbe/rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688", size = 216714, upload-time = "2025-08-27T12:14:13.629Z" }, + { url = "https://files.pythonhosted.org/packages/21/87/3fc94e47c9bd0742660e84706c311a860dcae4374cf4a03c477e23ce605a/rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797", size = 228943, upload-time = "2025-08-27T12:14:14.937Z" }, + { url = "https://files.pythonhosted.org/packages/70/36/b6e6066520a07cf029d385de869729a895917b411e777ab1cde878100a1d/rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334", size = 362472, upload-time = "2025-08-27T12:14:16.333Z" }, + { url = "https://files.pythonhosted.org/packages/af/07/b4646032e0dcec0df9c73a3bd52f63bc6c5f9cda992f06bd0e73fe3fbebd/rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33", size = 345676, upload-time = "2025-08-27T12:14:17.764Z" }, + { url = "https://files.pythonhosted.org/packages/b0/16/2f1003ee5d0af4bcb13c0cf894957984c32a6751ed7206db2aee7379a55e/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a", size = 385313, upload-time = "2025-08-27T12:14:19.829Z" }, + { url = "https://files.pythonhosted.org/packages/05/cd/7eb6dd7b232e7f2654d03fa07f1414d7dfc980e82ba71e40a7c46fd95484/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b", size = 399080, upload-time = "2025-08-27T12:14:21.531Z" }, + { url = "https://files.pythonhosted.org/packages/20/51/5829afd5000ec1cb60f304711f02572d619040aa3ec033d8226817d1e571/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7", size = 523868, upload-time = "2025-08-27T12:14:23.485Z" }, + { url = "https://files.pythonhosted.org/packages/05/2c/30eebca20d5db95720ab4d2faec1b5e4c1025c473f703738c371241476a2/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136", size = 408750, upload-time = "2025-08-27T12:14:24.924Z" }, + { url = "https://files.pythonhosted.org/packages/90/1a/cdb5083f043597c4d4276eae4e4c70c55ab5accec078da8611f24575a367/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff", size = 387688, upload-time = "2025-08-27T12:14:27.537Z" }, + { url = "https://files.pythonhosted.org/packages/7c/92/cf786a15320e173f945d205ab31585cc43969743bb1a48b6888f7a2b0a2d/rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9", size = 407225, upload-time = "2025-08-27T12:14:28.981Z" }, + { url = "https://files.pythonhosted.org/packages/33/5c/85ee16df5b65063ef26017bef33096557a4c83fbe56218ac7cd8c235f16d/rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60", size = 423361, upload-time = "2025-08-27T12:14:30.469Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8e/1c2741307fcabd1a334ecf008e92c4f47bb6f848712cf15c923becfe82bb/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e", size = 562493, upload-time = "2025-08-27T12:14:31.987Z" }, + { url = "https://files.pythonhosted.org/packages/04/03/5159321baae9b2222442a70c1f988cbbd66b9be0675dd3936461269be360/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212", size = 592623, upload-time = "2025-08-27T12:14:33.543Z" }, + { url = "https://files.pythonhosted.org/packages/ff/39/c09fd1ad28b85bc1d4554a8710233c9f4cefd03d7717a1b8fbfd171d1167/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675", size = 558800, upload-time = "2025-08-27T12:14:35.436Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d6/99228e6bbcf4baa764b18258f519a9035131d91b538d4e0e294313462a98/rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3", size = 221943, upload-time = "2025-08-27T12:14:36.898Z" }, + { url = "https://files.pythonhosted.org/packages/be/07/c802bc6b8e95be83b79bdf23d1aa61d68324cb1006e245d6c58e959e314d/rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash = "sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456", size = 233739, upload-time = "2025-08-27T12:14:38.386Z" }, + { url = "https://files.pythonhosted.org/packages/c8/89/3e1b1c16d4c2d547c5717377a8df99aee8099ff050f87c45cb4d5fa70891/rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3", size = 223120, upload-time = "2025-08-27T12:14:39.82Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/dc7931dc2fa4a6e46b2a4fa744a9fe5c548efd70e0ba74f40b39fa4a8c10/rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2", size = 358944, upload-time = "2025-08-27T12:14:41.199Z" }, + { url = "https://files.pythonhosted.org/packages/e6/22/4af76ac4e9f336bfb1a5f240d18a33c6b2fcaadb7472ac7680576512b49a/rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4", size = 342283, upload-time = "2025-08-27T12:14:42.699Z" }, + { url = "https://files.pythonhosted.org/packages/1c/15/2a7c619b3c2272ea9feb9ade67a45c40b3eeb500d503ad4c28c395dc51b4/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e", size = 380320, upload-time = "2025-08-27T12:14:44.157Z" }, + { url = "https://files.pythonhosted.org/packages/a2/7d/4c6d243ba4a3057e994bb5bedd01b5c963c12fe38dde707a52acdb3849e7/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817", size = 391760, upload-time = "2025-08-27T12:14:45.845Z" }, + { url = "https://files.pythonhosted.org/packages/b4/71/b19401a909b83bcd67f90221330bc1ef11bc486fe4e04c24388d28a618ae/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec", size = 522476, upload-time = "2025-08-27T12:14:47.364Z" }, + { url = "https://files.pythonhosted.org/packages/e4/44/1a3b9715c0455d2e2f0f6df5ee6d6f5afdc423d0773a8a682ed2b43c566c/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a", size = 403418, upload-time = "2025-08-27T12:14:49.991Z" }, + { url = "https://files.pythonhosted.org/packages/1c/4b/fb6c4f14984eb56673bc868a66536f53417ddb13ed44b391998100a06a96/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8", size = 384771, upload-time = "2025-08-27T12:14:52.159Z" }, + { url = "https://files.pythonhosted.org/packages/c0/56/d5265d2d28b7420d7b4d4d85cad8ef891760f5135102e60d5c970b976e41/rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48", size = 400022, upload-time = "2025-08-27T12:14:53.859Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e9/9f5fc70164a569bdd6ed9046486c3568d6926e3a49bdefeeccfb18655875/rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb", size = 416787, upload-time = "2025-08-27T12:14:55.673Z" }, + { url = "https://files.pythonhosted.org/packages/d4/64/56dd03430ba491db943a81dcdef115a985aac5f44f565cd39a00c766d45c/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734", size = 557538, upload-time = "2025-08-27T12:14:57.245Z" }, + { url = "https://files.pythonhosted.org/packages/3f/36/92cc885a3129993b1d963a2a42ecf64e6a8e129d2c7cc980dbeba84e55fb/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb", size = 588512, upload-time = "2025-08-27T12:14:58.728Z" }, + { url = "https://files.pythonhosted.org/packages/dd/10/6b283707780a81919f71625351182b4f98932ac89a09023cb61865136244/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0", size = 555813, upload-time = "2025-08-27T12:15:00.334Z" }, + { url = "https://files.pythonhosted.org/packages/04/2e/30b5ea18c01379da6272a92825dd7e53dc9d15c88a19e97932d35d430ef7/rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a", size = 217385, upload-time = "2025-08-27T12:15:01.937Z" }, + { url = "https://files.pythonhosted.org/packages/32/7d/97119da51cb1dd3f2f3c0805f155a3aa4a95fa44fe7d78ae15e69edf4f34/rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772", size = 230097, upload-time = "2025-08-27T12:15:03.961Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "sse-starlette" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = "2025-07-27T09:07:44.565Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" }, +] + +[[package]] +name = "starlette" +version = "0.48.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a7/a5/d6f429d43394057b67a6b5bbe6eae2f77a6bf7459d961fdb224bf206eee6/starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46", size = 2652949, upload-time = "2025-09-13T08:41:05.699Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/72/2db2f49247d0a18b4f1bb9a5a39a0162869acf235f3a96418363947b3d46/starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659", size = 73736, upload-time = "2025-09-13T08:41:03.869Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, +] diff --git a/mcp_servers/package-lock.json b/mcp_servers/package-lock.json new file mode 100644 index 00000000..ee4df213 --- /dev/null +++ b/mcp_servers/package-lock.json @@ -0,0 +1,2160 @@ +{ + "name": "klavis-ai-mcp-servers", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "klavis-ai-mcp-servers", + "version": "1.0.0", + "devDependencies": { + "@typescript-eslint/eslint-plugin": "^7.0.0", + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-prettier": "^5.2.6", + "lint-staged": "^15.5.1", + "prettier": "^3.5.3" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.6.1", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { + "version": "1.1.11", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.1", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.13.0", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.3", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": { + "version": "1.1.11", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@pkgr/core": { + "version": "0.2.4", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/pkgr" + } + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "7.18.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/type-utils": "7.18.0", + "@typescript-eslint/utils": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "graphemer": "^1.4.0", + "ignore": "^5.3.1", + "natural-compare": "^1.4.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "7.18.0", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/typescript-estree": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "7.18.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "7.18.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/typescript-estree": "7.18.0", + "@typescript-eslint/utils": "7.18.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "7.18.0", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "7.18.0", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "7.18.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/typescript-estree": "7.18.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "7.18.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "dev": true, + "license": "ISC" + }, + "node_modules/acorn": { + "version": "8.14.1", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-escapes": { + "version": "7.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "environment": "^1.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/array-union": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/cli-cursor": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "restore-cursor": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "slice-ansi": "^5.0.0", + "string-width": "^7.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "dev": true, + "license": "MIT" + }, + "node_modules/colorette": { + "version": "2.0.20", + "dev": true, + "license": "MIT" + }, + "node_modules/commander": { + "version": "13.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.4.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "dev": true, + "license": "MIT" + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/emoji-regex": { + "version": "10.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/environment": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.57.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-config-prettier": { + "version": "9.1.0", + "dev": true, + "license": "MIT", + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-prettier": { + "version": "5.2.6", + "dev": true, + "license": "MIT", + "dependencies": { + "prettier-linter-helpers": "^1.0.0", + "synckit": "^0.11.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint-plugin-prettier" + }, + "peerDependencies": { + "@types/eslint": ">=8.0.0", + "eslint": ">=8.0.0", + "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0", + "prettier": ">=3.0.0" + }, + "peerDependenciesMeta": { + "@types/eslint": { + "optional": true + }, + "eslint-config-prettier": { + "optional": true + } + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/brace-expansion": { + "version": "1.1.11", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/espree": { + "version": "9.6.1", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eventemitter3": { + "version": "5.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/execa": { + "version": "8.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^8.0.1", + "human-signals": "^5.0.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": ">=16.17" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-diff": { + "version": "1.3.0", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "dev": true, + "license": "MIT" + }, + "node_modules/fastq": { + "version": "1.19.1", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "dev": true, + "license": "ISC" + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/get-east-asian-width": { + "version": "1.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-stream": { + "version": "8.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.11", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/globals": { + "version": "13.24.0", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/graphemer": { + "version": "1.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/has-flag": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/human-signals": { + "version": "5.0.0", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=16.17.0" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "dev": true, + "license": "ISC" + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-stream": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/keyv": { + "version": "4.5.4", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lilconfig": { + "version": "3.1.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lint-staged": { + "version": "15.5.1", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^5.4.1", + "commander": "^13.1.0", + "debug": "^4.4.0", + "execa": "^8.0.1", + "lilconfig": "^3.1.3", + "listr2": "^8.2.5", + "micromatch": "^4.0.8", + "pidtree": "^0.6.0", + "string-argv": "^0.3.2", + "yaml": "^2.7.0" + }, + "bin": { + "lint-staged": "bin/lint-staged.js" + }, + "engines": { + "node": ">=18.12.0" + }, + "funding": { + "url": "https://opencollective.com/lint-staged" + } + }, + "node_modules/lint-staged/node_modules/chalk": { + "version": "5.4.1", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/listr2": { + "version": "8.3.2", + "dev": true, + "license": "MIT", + "dependencies": { + "cli-truncate": "^4.0.0", + "colorette": "^2.0.20", + "eventemitter3": "^5.0.1", + "log-update": "^6.1.0", + "rfdc": "^1.4.1", + "wrap-ansi": "^9.0.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "dev": true, + "license": "MIT" + }, + "node_modules/log-update": { + "version": "6.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-escapes": "^7.0.0", + "cli-cursor": "^5.0.0", + "slice-ansi": "^7.1.0", + "strip-ansi": "^7.1.0", + "wrap-ansi": "^9.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/ansi-regex": { + "version": "6.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/log-update/node_modules/ansi-styles": { + "version": "6.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/log-update/node_modules/is-fullwidth-code-point": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "get-east-asian-width": "^1.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/slice-ansi": { + "version": "7.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.2.1", + "is-fullwidth-code-point": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/log-update/node_modules/strip-ansi": { + "version": "7.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mimic-fn": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mimic-function": { + "version": "5.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/minimatch": { + "version": "9.0.5", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/npm-run-path": { + "version": "5.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/once": { + "version": "1.4.0", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "6.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-type": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pidtree": { + "version": "0.6.0", + "dev": true, + "license": "MIT", + "bin": { + "pidtree": "bin/pidtree.js" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.5.3", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-linter-helpers": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-diff": "^1.1.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/restore-cursor": { + "version": "5.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "onetime": "^7.0.0", + "signal-exit": "^4.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/restore-cursor/node_modules/onetime": { + "version": "7.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-function": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rfdc": { + "version": "1.4.1", + "dev": true, + "license": "MIT" + }, + "node_modules/rimraf": { + "version": "3.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/semver": { + "version": "7.7.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/slash": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/slice-ansi": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.0.0", + "is-fullwidth-code-point": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/slice-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/string-argv": { + "version": "0.3.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.6.19" + } + }, + "node_modules/string-width": { + "version": "7.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width/node_modules/ansi-regex": { + "version": "6.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/string-width/node_modules/strip-ansi": { + "version": "7.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/synckit": { + "version": "0.11.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@pkgr/core": "^0.2.3", + "tslib": "^2.8.1" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/synckit" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-api-utils": { + "version": "1.4.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "dev": true, + "license": "0BSD" + }, + "node_modules/type-check": { + "version": "0.4.0", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.8.3", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": "9.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "dev": true, + "license": "ISC" + }, + "node_modules/yaml": { + "version": "2.7.1", + "dev": true, + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/mcp_servers/package.json b/mcp_servers/package.json new file mode 100644 index 00000000..14a2acfa --- /dev/null +++ b/mcp_servers/package.json @@ -0,0 +1,21 @@ +{ + "name": "klavis-ai-mcp-servers", + "version": "1.0.0", + "private": true, + "scripts": { + "lint": "eslint \"**/*.ts\" --max-warnings=0", + "lint:fix": "eslint \"**/*.ts\" --fix", + "format": "prettier --write \"**/*.{js,jsx,ts,tsx,json,md}\"", + "format:check": "prettier --check \"**/*.{js,jsx,ts,tsx,json,md}\"", + "typecheck": "tsc --noEmit" + }, + "devDependencies": { + "@typescript-eslint/eslint-plugin": "^7.0.0", + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-prettier": "^5.2.6", + "lint-staged": "^15.5.1", + "prettier": "^3.5.3" + } +} \ No newline at end of file diff --git a/mcp_servers/pandoc/Dockerfile b/mcp_servers/pandoc/Dockerfile index b27540b2..0fe1e02d 100644 --- a/mcp_servers/pandoc/Dockerfile +++ b/mcp_servers/pandoc/Dockerfile @@ -18,7 +18,7 @@ RUN pip install --no-cache-dir -r requirements.txt # Copy the server code COPY mcp_servers/pandoc/server.py . -COPY mcp_servers/pandoc/.env . +COPY mcp_servers/pandoc/.env.example .env # Expose the port the server runs on EXPOSE 5000 diff --git a/mcp_servers/pandoc/README.md b/mcp_servers/pandoc/README.md index 60f913bc..34ef696a 100644 --- a/mcp_servers/pandoc/README.md +++ b/mcp_servers/pandoc/README.md @@ -1,168 +1,73 @@ # Pandoc MCP Server -[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) -[![Python: 3.12+](https://img.shields.io/badge/Python-3.12+-blue.svg)](https://www.python.org/downloads/) -[![FastAPI](https://img.shields.io/badge/FastAPI-0.100.0+-00a393.svg)](https://fastapi.tiangolo.com/) -[![Pandoc](https://img.shields.io/badge/Pandoc-latest-blue.svg)](https://pandoc.org/) +A Model Context Protocol (MCP) server for Pandoc integration. Convert documents between different formats using Pandoc's universal document converter. -## šŸ“– Overview +## šŸš€ Quick Start - Run in 30 Seconds -Pandoc MCP Server is a Model Context Protocol (MCP) implementation that converts Markdown text into various document formats using Pandoc. It provides a standardized interface for document conversion, uploading to Google Cloud Storage (GCS), and returning publicly accessible signed URLs. +### 🌐 Using Hosted Service (Recommended for Production) -## šŸš€ Features +Get instant access to Pandoc with our managed infrastructure - **no setup required**: -This server provides the following capabilities through MCP tools: +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** -| Tool | Description | -|------|-------------| -| `convert_markdown_to_file` | Converts markdown text to various formats (PDF, DOCX, DOC, HTML, HTML5) and returns a GCS signed URL | - -## šŸ”§ Prerequisites - -You'll need one of the following: - -- **Docker:** Docker installed and running (recommended) -- **Python:** Python 3.12+ with pip - -Additionally, you'll need: - -- **Google Cloud Storage (GCS) Bucket:** A GCS bucket where the server can upload converted files -- **Google Cloud Credentials:** Application Default Credentials (ADC) configured in your environment - -You can configure ADC by running: ```bash -gcloud auth application-default login +pip install klavis +# or +npm install klavis ``` -For local Python setup, you'll also need: -- **Pandoc:** The Pandoc document converter -- **LaTeX:** A LaTeX distribution (like TeX Live or MiKTeX) for PDF conversion - -## āš™ļø Setup & Configuration - -### Environment Configuration - -1. **Create your environment file**: - ```bash - cp .env.example .env - ``` - -2. **Edit the `.env` file** with your GCS bucket name: - ``` - GCS_BUCKET_NAME=your-gcs-bucket-name - ``` - -## šŸƒā€ā™‚ļø Running the Server - -### Option 1: Docker (Recommended) - -The Docker build must be run from the project root directory (`klavis/`): - -```bash -# Navigate to the root directory of the project -cd /path/to/klavis - -# Build the Docker image -docker build -t pandoc-mcp-server -f mcp_servers/pandoc/Dockerfile . +```python +from klavis import Klavis -# Run the container (with Google Cloud credentials mounted) -docker run -p 5000:5000 --rm -v ~/.config/gcloud:/root/.config/gcloud pandoc-mcp-server +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("PANDOC", "user123") ``` -To use your local .env file instead of building it into the image: +### 🐳 Using Docker (For Self-Hosting) ```bash -docker run -p 5000:5000 --rm --env-file mcp_servers/pandoc/.env -v ~/.config/gcloud:/root/.config/gcloud pandoc-mcp-server -``` +# Pull latest image +docker pull ghcr.io/klavis-ai/pandoc-mcp-server:latest -### Option 2: Python Virtual Environment - -```bash -# Create and activate virtual environment -python -m venv venv -source venv/bin/activate # On Windows: venv\Scripts\activate -# Install dependencies -pip install -r requirements.txt - -# Run the server -python server.py +# Run Pandoc MCP Server (no authentication required) +docker run -p 5000:5000 \ + ghcr.io/klavis-ai/pandoc-mcp-server:latest ``` -Once running, the server will be accessible at `http://localhost:5000`. - -## šŸ”Œ API Usage - -The server implements the Model Context Protocol (MCP) standard. Here's an example of how to call a tool: - -```python -import httpx - -async def call_pandoc_tool(): - url = "http://localhost:5000/execute" - payload = { - "tool_name": "convert_markdown_to_file", - "tool_args": { - "markdown_text": "# Hello World\n\nThis is a test document.", - "output_format": "pdf" - } - } - - async with httpx.AsyncClient() as client: - response = await client.post(url, json=payload) - result = response.json() - return result -``` +**No Authentication:** Pandoc document conversion typically requires no external authentication. -## šŸ“‹ Common Operations +## šŸ› ļø Available Tools -### Converting Markdown to PDF +- **Document Conversion**: Convert between various document formats +- **Format Support**: Handle Markdown, HTML, PDF, Word, LaTeX, and more +- **Template Processing**: Use custom templates for document generation +- **Batch Processing**: Convert multiple documents efficiently +- **Format Options**: Configure conversion options and output settings -```python -payload = { - "tool_name": "convert_markdown_to_file", - "tool_args": { - "markdown_text": "# Hello World\n\nThis is a test document.", - "output_format": "pdf" - } -} -``` +## šŸ“š Documentation & Support -### Converting Markdown to DOCX - -```python -payload = { - "tool_name": "convert_markdown_to_file", - "tool_args": { - "markdown_text": "# Hello World\n\nThis is a test document.", - "output_format": "docx" - } -} -``` - -## šŸ› ļø Troubleshooting - -### Docker Build Issues - -- **File Not Found Errors**: If you see errors like `failed to compute cache key: failed to calculate checksum of ref: not found`, this means Docker can't find the files referenced in the Dockerfile. Make sure you're building from the root project directory (`klavis/`), not from the server directory. - -### Common Runtime Issues - -- **GCS Bucket Not Found**: Verify your GCS bucket exists and you have the correct permissions -- **Google Cloud Authentication**: Ensure your ADC is properly configured -- **PDF Generation Issues**: Avoid using emojis or complex characters in markdown text when converting to PDF, as they might cause issues with LaTeX -- **Missing LaTeX**: For local setup, ensure a LaTeX distribution is installed if you need PDF conversion +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | ## šŸ¤ Contributing -Contributions are welcome! Please feel free to submit a Pull Request. - -1. Fork the repository -2. Create your feature branch (`git checkout -b feature/amazing-feature`) -3. Commit your changes (`git commit -m 'Add some amazing feature'`) -4. Push to the branch (`git push origin feature/amazing-feature`) -5. Open a Pull Request +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. ## šŸ“œ License -This project is licensed under the MIT License - see the LICENSE file for details. +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/pandoc/requirements.txt b/mcp_servers/pandoc/requirements.txt index 688a9b46..21a323e0 100644 --- a/mcp_servers/pandoc/requirements.txt +++ b/mcp_servers/pandoc/requirements.txt @@ -1,4 +1,4 @@ -mcp>=1.6.0 +mcp==1.11.0 pydantic>=2.0.0 fastapi>=0.100.0 uvicorn[standard]>=0.22.0 diff --git a/mcp_servers/pandoc/server.py b/mcp_servers/pandoc/server.py index f9c6c082..cbe51351 100644 --- a/mcp_servers/pandoc/server.py +++ b/mcp_servers/pandoc/server.py @@ -1,26 +1,37 @@ -from mcp.server.fastmcp import FastMCP -import pypandoc -from google.cloud import storage -from google.cloud.exceptions import NotFound +import os import logging -from dotenv import load_dotenv +import contextlib import uuid import tempfile +from collections.abc import AsyncIterator from typing import Annotated -from pydantic import Field + +import click +import pypandoc import datetime import google.auth from google.auth.transport import requests -import os +from google.cloud import storage +from google.cloud.exceptions import NotFound +from dotenv import load_dotenv +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from pydantic import Field load_dotenv() -logger = logging.getLogger(__name__) -mcp = FastMCP( - "Pandoc", - instructions="Using pandoc to convert markdown text to pdf, microsoft word and html files.", - port=5000, -) +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("pandoc-mcp-server") + +# Default port configuration +PANDOC_MCP_SERVER_PORT = int(os.getenv("PANDOC_MCP_SERVER_PORT", "5000")) def upload_blob_and_get_signed_url( @@ -78,25 +89,17 @@ def upload_blob_and_get_signed_url( return None -@mcp.tool() -async def convert_markdown_to_file( - markdown_text: Annotated[ - str, Field(description="The text in markdown format to convert.") - ], - output_format: Annotated[ - str, - Field( - description="The format to convert the markdown to. Must be one of pdf, docx, doc, html, html5." - ), - ], -) -> str: - """Convert markdown text to pdf, microsoft word and html files. Returns the url of the converted file. - For pdf, it uses pdflatex to generate the pdf file. Therefore, for pdf please DO NOT use emoji in the markdown text. - +async def convert_markdown_to_file(markdown_text: str, output_format: str) -> str: + """ + Convert markdown text to pdf, microsoft word and html files. + + Args: + markdown_text: The text in markdown format to convert + output_format: The format to convert the markdown to (pdf, docx, doc, html, html5) + Returns: The converted file url. """ - if output_format not in ["pdf", "docx", "doc", "html", "html5"]: return f"Unsupported format. Only pdf, docx, doc, html and html5 are supported." with tempfile.NamedTemporaryFile( @@ -118,8 +121,162 @@ async def convert_markdown_to_file( return url -def main(): - mcp.run(transport="sse") +@click.command() +@click.option("--port", default=PANDOC_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server( + "pandoc-mcp-server", + instructions="Using pandoc to convert markdown text to pdf, microsoft word and html files.", + ) + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="convert_markdown_to_file", + description="Convert markdown text to pdf, microsoft word and html files. Returns the url of the converted file.", + inputSchema={ + "type": "object", + "required": ["markdown_text", "output_format"], + "properties": { + "markdown_text": { + "type": "string", + "description": "The text in markdown format to convert." + }, + "output_format": { + "type": "string", + "description": "The format to convert the markdown to. Must be one of pdf, docx, doc, html, html5." + } + }, + }, + annotations=types.ToolAnnotations(**{"category": "PANDOC_CONVERT"}), + ) + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + ctx = app.request_context + + if name == "convert_markdown_to_file": + markdown_text = arguments.get("markdown_text") + output_format = arguments.get("output_format") + + if not markdown_text or not output_format: + return [ + types.TextContent( + type="text", + text="Error: Both markdown_text and output_format parameters are required", + ) + ] + + try: + result = await convert_markdown_to_file(markdown_text, output_format) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + await session_manager.handle_request(scope, receive, send) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 if __name__ == "__main__": diff --git a/mcp_servers/perplexity_ai/.env.example b/mcp_servers/perplexity_ai/.env.example new file mode 100644 index 00000000..9b291505 --- /dev/null +++ b/mcp_servers/perplexity_ai/.env.example @@ -0,0 +1,2 @@ +PERPLEXITY_API_KEY= +PERPLEXITY_MCP_SERVER_PORT=5000 \ No newline at end of file diff --git a/mcp_servers/perplexity_ai/Dockerfile b/mcp_servers/perplexity_ai/Dockerfile new file mode 100644 index 00000000..c10c0349 --- /dev/null +++ b/mcp_servers/perplexity_ai/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.11-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements and install Python dependencies +COPY requirements.txt . +RUN pip install -r requirements.txt + +# Copy application code +COPY . . + +# Expose the port +EXPOSE 5000 + +# Run the server +CMD ["python", "server.py", "--port", "5000"] diff --git a/mcp_servers/perplexity_ai/README.md b/mcp_servers/perplexity_ai/README.md new file mode 100644 index 00000000..2b9a1ecc --- /dev/null +++ b/mcp_servers/perplexity_ai/README.md @@ -0,0 +1,161 @@ +# Perplexity AI MCP Server + +A Model Context Protocol (MCP) server for performing real-time web search and reasoning using Perplexity AI's Sonar models. Provides focused tools with automatic citation handling. + +## Features + +- **Web search**: Uses Perplexity `sonar-pro` for up-to-date results +- **Reasoning**: Uses Perplexity `sonar-reasoning-pro` for structured analysis +- **Citations**: Automatically extracts and appends numbered citations +- **Conversation context**: Accepts `system`/`user`/`assistant` messages +- **Dual transport**: Supports both SSE and StreamableHTTP +- **MCP compatible**: Implements the standard MCP server interface + +## Available Tools + +### `perplexity_search` +Performs a web search via Perplexity Sonar models. + +- **Model**: `sonar-pro` +- **Parameters**: + - `messages` (array of objects, required): Each item has `role` and `content` +- **Returns**: Text with result content and appended citations when available +- **Best for**: Research queries, real-time facts, current events + +### `perplexity_reason` +Performs reasoning tasks using Perplexity's reasoning-optimized model. + +- **Model**: `sonar-reasoning-pro` +- **Parameters**: + - `messages` (array of objects, required): Each item has `role` and `content` +- **Returns**: Well-reasoned response with citations when available +- **Best for**: Multi-step reasoning and structured analysis + +## Prerequisites + +- Python 3.11+ +- Perplexity AI API key (create in Perplexity settings) +- Docker (optional) + +## Configuration + +Create a `.env` file in `mcp_servers/perplexity_ai/` for server config: + +```bash +PERPLEXITY_MCP_SERVER_PORT=5000 +``` + +API keys are provided via request header (see Authentication). The server does not read the API key from `.env`. + +## Authentication + +Provide your Perplexity API key in the request header: + +``` +x-api-key: YOUR_PERPLEXITY_API_KEY +``` + +## Running the Server + +### Direct Python + +```bash +cd mcp_servers/perplexity_ai +python -m venv venv +source venv/bin/activate # Windows: venv\Scripts\activate +pip install -r requirements.txt +python server.py --port 5000 --log-level INFO +``` + +### Docker (from repo root) + +```bash +docker build -t perplexity-mcp-server -f mcp_servers/perplexity_ai/Dockerfile . +docker run -p 5000:5000 perplexity-mcp-server +``` + +### Command line options + +- `--port`: Port to listen on (default: 5000) +- `--log-level`: `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL` +- `--json-response`: Return JSON for StreamableHTTP instead of SSE streams + +## API Endpoints + +- `/sse` — Server-Sent Events endpoint +- `/messages/` — SSE message handling endpoint +- `/mcp` — StreamableHTTP endpoint (JSON-RPC over HTTP) + +## Usage + +### Call via HTTP + +```bash +curl -X POST \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -H "x-api-key: YOUR_PERPLEXITY_API_KEY" \ + -d '{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"perplexity_search","arguments":{"messages":[{"role":"user","content":"What is quantum computing?"}]}}}' \ + http://localhost:5000/mcp +``` + +### Example (client-side tool call) + +```python +response = await client.call_tool( + "perplexity_search", + { + "messages": [ + {"role": "system", "content": "You are a research assistant."}, + {"role": "user", "content": "Latest developments in AI this week"} + ] + } +) + +response = await client.call_tool( + "perplexity_reason", + { + "messages": [ + {"role": "user", "content": "Reason step by step: pros and cons of nuclear vs solar for baseload"} + ] + } +) +``` + +## Response format + +The tool returns text content. When citations are present, they are appended as numbered references: + +``` +...answer content... + +Citations: +[1] https://example.com/source-1 +[2] https://example.com/source-2 +``` + +## Error handling + +Common errors include: + +- Invalid or missing `messages` +- Missing/invalid API key +- Upstream API rate limits or network failures + +All responses follow standard MCP error formatting. + +Note: For the StreamableHTTP endpoint, clients must set the `Accept` header to include both `application/json` and `text/event-stream`. + +## Dependencies + +- mcp>=1.12.0 +- fastapi +- starlette +- uvicorn[standard] +- httpx +- click +- python-dotenv + +## License + +This project follows the same license as the parent Klavis repository. \ No newline at end of file diff --git a/mcp_servers/perplexity_ai/requirements.txt b/mcp_servers/perplexity_ai/requirements.txt new file mode 100644 index 00000000..cc857a94 --- /dev/null +++ b/mcp_servers/perplexity_ai/requirements.txt @@ -0,0 +1,10 @@ +mcp>=1.12.0 +pydantic +fastapi +uvicorn[standard] +python-dotenv +typing-extensions +requests +httpx +click +starlette diff --git a/mcp_servers/perplexity_ai/server.py b/mcp_servers/perplexity_ai/server.py new file mode 100644 index 00000000..2ea43c4c --- /dev/null +++ b/mcp_servers/perplexity_ai/server.py @@ -0,0 +1,244 @@ +import contextlib +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + auth_token_context, + perplexity_search, + perplexity_reason, +) + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +PERPLEXITY_MCP_SERVER_PORT = int(os.getenv("PERPLEXITY_MCP_SERVER_PORT", "5000")) + +@click.command() +@click.option("--port", default=PERPLEXITY_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("perplexity-ai-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # Perplexity Search Tool + types.Tool( + name="perplexity_search", + description=( + "Performs web search using the Sonar API. " + "Accepts an array of messages (each with a role and content) " + "and returns a search completion response from the Perplexity model." + ), + inputSchema={ + "type": "object", + "properties": { + "messages": { + "type": "array", + "items": { + "type": "object", + "properties": { + "role": { + "type": "string", + "description": "Role of the message (e.g., system, user, assistant)", + }, + "content": { + "type": "string", + "description": "The content of the message", + }, + }, + "required": ["role", "content"], + }, + "description": "Array of conversation messages", + }, + }, + "required": ["messages"], + }, + ), + # Perplexity Reason Tool + types.Tool( + name="perplexity_reason", + description=( + "Performs reasoning tasks using the Sonar API. " + "Accepts an array of messages (each with a role and content) " + "and returns a well‑reasoned response using the sonar‑reasoning‑pro model." + ), + inputSchema={ + "type": "object", + "properties": { + "messages": { + "type": "array", + "items": { + "type": "object", + "properties": { + "role": { + "type": "string", + "description": "Role of the message (e.g., system, user, assistant)", + }, + "content": { + "type": "string", + "description": "The content of the message", + }, + }, + "required": ["role", "content"], + }, + "description": "Array of conversation messages", + }, + }, + "required": ["messages"], + }, + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + try: + if name == "perplexity_search": + messages = arguments.get("messages") + if not messages or not isinstance(messages, list): + return [types.TextContent(type="text", text="Error: 'messages' parameter is required and must be an array")] + + result = await perplexity_search(messages) + return [types.TextContent(type="text", text=result)] + + elif name == "perplexity_reason": + messages = arguments.get("messages") + if not messages or not isinstance(messages, list): + return [types.TextContent(type="text", text="Error: 'messages' parameter is required and must be an array")] + + result = await perplexity_reason(messages) + return [types.TextContent(type="text", text=result)] + + else: + return [types.TextContent(type="text", text=f"Unknown tool: {name}")] + + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract API key from headers (allow None - will be handled at tool level) + api_key = request.headers.get('x-api-key') + + # Set the API key in context for this request (can be None) + token = auth_token_context.set(api_key or "") + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract API key from headers (allow None - will be handled at tool level) + headers = dict(scope.get("headers", [])) + api_key = headers.get(b'x-api-key') + if api_key: + api_key = api_key.decode('utf-8') + + # Set the API key in context for this request (can be None/empty) + token = auth_token_context.set(api_key or "") + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Perplexity AI MCP Server started with dual transports!") + try: + yield + finally: + logger.info("Perplexity AI MCP Server shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Perplexity AI MCP Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() diff --git a/mcp_servers/perplexity_ai/tools/__init__.py b/mcp_servers/perplexity_ai/tools/__init__.py new file mode 100644 index 00000000..16e625f2 --- /dev/null +++ b/mcp_servers/perplexity_ai/tools/__init__.py @@ -0,0 +1,17 @@ +# Perplexity AI MCP Server Tools +# This package contains all the tool implementations for Perplexity AI + +from .search import ( + perplexity_search, + perplexity_reason, +) +from .base import auth_token_context + +__all__ = [ + # Perplexity tools + "perplexity_search", + "perplexity_reason", + + # Base + "auth_token_context", +] diff --git a/mcp_servers/perplexity_ai/tools/base.py b/mcp_servers/perplexity_ai/tools/base.py new file mode 100644 index 00000000..ec02b190 --- /dev/null +++ b/mcp_servers/perplexity_ai/tools/base.py @@ -0,0 +1,50 @@ +import logging +from typing import Any, Dict, Optional +from contextvars import ContextVar +import httpx + +# Configure logging +logger = logging.getLogger(__name__) + +PERPLEXITY_API_BASE_URL = "https://api.perplexity.ai/" + +# Context variable to store the API key for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_api_key() -> str: + """Get the API key from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("API key not found in request context") + +async def make_perplexity_request( + endpoint: str, + method: str = "POST", + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """Make a REST API request to Perplexity AI API.""" + api_key = get_api_key() + + headers = { + "Authorization": f"Bearer {api_key}", + "Content-Type": "application/json" + } + + url = f"{PERPLEXITY_API_BASE_URL}/{endpoint.lstrip('/')}" + + async with httpx.AsyncClient(timeout=60.0) as client: + if method.upper() == "GET": + response = await client.get(url, headers=headers, params=params) + elif method.upper() == "POST": + response = await client.post(url, headers=headers, json=data, params=params) + elif method.upper() == "PUT": + response = await client.put(url, headers=headers, json=data, params=params) + elif method.upper() == "DELETE": + response = await client.delete(url, headers=headers, params=params) + else: + raise ValueError(f"Unsupported HTTP method: {method}") + + response.raise_for_status() + return response.json() diff --git a/mcp_servers/perplexity_ai/tools/search.py b/mcp_servers/perplexity_ai/tools/search.py new file mode 100644 index 00000000..03451dbc --- /dev/null +++ b/mcp_servers/perplexity_ai/tools/search.py @@ -0,0 +1,74 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import make_perplexity_request + +# Configure logging +logger = logging.getLogger(__name__) + +async def perform_chat_completion( + messages: List[Dict[str, str]], + model: str = "sonar-pro" +) -> str: + """ + Performs a chat completion by sending a request to the Perplexity API. + Appends citations to the returned message content if they exist. + + Args: + messages: An array of message objects with role and content + model: The model to use for the completion + + Returns: + The chat completion result with appended citations + """ + try: + data = { + "model": model, + "messages": messages + } + + result = await make_perplexity_request("chat/completions", "POST", data) + + # Get the main message content from the response + message_content = result.get("choices", [{}])[0].get("message", {}).get("content", "") + + # If citations are provided, append them to the message content + citations = result.get("citations", []) + if citations and isinstance(citations, list) and len(citations) > 0: + message_content += "\n\nCitations:\n" + for index, citation in enumerate(citations): + message_content += f"[{index + 1}] {citation}\n" + + return message_content + + except Exception as e: + logger.error(f"Error in chat completion: {str(e)}") + raise e + +async def perplexity_search(messages: List[Dict[str, str]]) -> str: + """ + Performs web search using the Sonar API. + Accepts an array of messages (each with a role and content) + and returns a search completion response from the Perplexity model. + + Args: + messages: Array of conversation messages with role and content + + Returns: + Search completion response with citations if available + """ + return await perform_chat_completion(messages, "sonar-pro") + + + +async def perplexity_reason(messages: List[Dict[str, str]]) -> str: + """ + Performs reasoning tasks using Perplexity's reasoning-optimized model. + Accepts an array of messages and returns a reasoned response. + + Args: + messages: Array of conversation messages with role and content + + Returns: + Reasoned response with citations if available + """ + return await perform_chat_completion(messages, "sonar-reasoning-pro") diff --git a/mcp_servers/postgres/.eslintrc.json b/mcp_servers/postgres/.eslintrc.json new file mode 100644 index 00000000..6a1b1376 --- /dev/null +++ b/mcp_servers/postgres/.eslintrc.json @@ -0,0 +1,11 @@ +{ + "root": false, + "extends": [ + "../.eslintrc.js" + ], + "parserOptions": { + "tsconfigRootDir": ".", + "project": "./tsconfig.json" + }, + "rules": {} +} \ No newline at end of file diff --git a/mcp_servers/postgres/Dockerfile b/mcp_servers/postgres/Dockerfile index e6b8a3f8..d815fe2b 100755 --- a/mcp_servers/postgres/Dockerfile +++ b/mcp_servers/postgres/Dockerfile @@ -1,12 +1,19 @@ FROM node:22.12-alpine AS builder -COPY mcp_servers/postgres /app - +# Set the working directory inside the container WORKDIR /app -RUN --mount=type=cache,target=/root/.npm npm install +# Copy package.json and package-lock.json to install dependencies +COPY mcp_servers/postgres/package.json mcp_servers/postgres/package-lock.json ./ + +# Install dependencies (ignoring scripts to prevent running the prepare script) +RUN npm install --ignore-scripts + +# Copy the rest of the application source code +COPY mcp_servers/postgres . -RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev +# Build the application using TypeScript +RUN npm run build FROM node:22-alpine AS release diff --git a/mcp_servers/postgres/README.md b/mcp_servers/postgres/README.md index a3592713..4bf94579 100644 --- a/mcp_servers/postgres/README.md +++ b/mcp_servers/postgres/README.md @@ -1,103 +1,75 @@ # PostgreSQL MCP Server -[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) +A Model Context Protocol (MCP) server for PostgreSQL database integration. Execute read-only SQL queries and inspect database schemas safely. -This is a Model Context Protocol (MCP) server designed to provide read-only access to PostgreSQL databases. It enables Large Language Models (LLMs) and other compatible clients to interact with your database by inspecting schemas and executing safe, read-only SQL queries. +## šŸš€ Quick Start - Run in 30 Seconds -This server is based on the reference implementation from [modelcontextprotocol/servers](https://github.com/modelcontextprotocol/servers/tree/main/src/postgres). +### 🌐 Using Hosted Service (Recommended for Production) -## License +Get instant access to PostgreSQL with our managed infrastructure - **no setup required**: -This MCP server is licensed under the **MIT License**. You are free to use, modify, and distribute the software under the terms of this license. +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** -## Components +```bash +pip install klavis +# or +npm install klavis +``` -### Tools +```python +from klavis import Klavis -* **`query`** - * **Description:** Executes a read-only SQL query against the connected PostgreSQL database. - * **Input:** `{"sql": "SELECT * FROM your_table LIMIT 10;"}` (A JSON object containing the SQL query string). - * **Output:** A JSON array containing the query results. - * **Note:** All queries are automatically wrapped in a `BEGIN TRANSACTION READ ONLY` block to ensure no data modification occurs. +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("POSTGRES", "user123") +``` -### Resources +### 🐳 Using Docker (For Self-Hosting) -The server exposes the schema of publicly accessible tables as resources: +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/postgres-mcp-server:latest -* **Table Schemas:** (`postgres://///schema`) - * Provides JSON schema information for each table in the `public` schema of the connected database. - * Includes column names and their corresponding data types. - * Automatically discovered from database metadata (`information_schema`). -## Prerequisites +# Run PostgreSQL MCP Server +docker run -p 5000:5000 \ + -e API_KEY="postgresql://user:password@host:port/database" \ + ghcr.io/klavis-ai/postgres-mcp-server:latest +``` -* **Node.js and npm:** Required for running locally without Docker (v18+ recommended). -* **Docker:** (Optional, Recommended) For containerized deployment. -* **PostgreSQL Database:** An accessible PostgreSQL database instance. +**Connection Setup:** Provide your PostgreSQL connection string via `API_KEY`. The server automatically wraps all queries in read-only transactions for safety. -## Setup +## šŸ› ļø Available Tools -1. **Clone the Repository:** If you haven't already, clone the main Klavis AI repository. -2. **Configure Environment Variables:** - * This server requires the connection string for your PostgreSQL database. - * Navigate to the `mcp_servers/postgres` directory. - * Copy the example environment file: `cp .env.example .env` - * Edit the `.env` file and set the `DATABASE_URL` variable to your PostgreSQL connection string. - ```env - # .env - DATABASE_URL=postgresql://:@:/ - ``` - Replace ``, ``, ``, ``, and `` with your actual database credentials and details. +- **SQL Queries**: Execute safe, read-only SQL queries +- **Schema Inspection**: Browse table schemas and column information +- **Database Resources**: Access table metadata and structure information +- **Query Results**: Get structured JSON results from SQL queries -## Running the Server +All queries are automatically wrapped in `BEGIN TRANSACTION READ ONLY` to ensure no data modification occurs. -You can run the server using Docker (recommended) or directly with Node.js/npm. +## šŸ“š Documentation & Support -### Using Docker (Recommended) +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | -1. **Build the Docker Image:** - * Navigate to the **root directory** of the Klavis AI repository (the one containing the `mcp_servers` folder). - * Run the build command: - ```bash - docker build -t klavis-ai/mcp-server-postgres -f mcp_servers/postgres/Dockerfile . - ``` - * `-t klavis-ai/mcp-server-postgres`: Tags the image with a descriptive name. - * `-f mcp_servers/postgres/Dockerfile`: Specifies the path to the Dockerfile. - * `.`: Specifies the build context (the root directory). +## šŸ¤ Contributing -2. **Run the Docker Container:** - * Use the `DATABASE_URL` you configured in the `.env` file. - * Run the container, exposing port 5000: - ```bash - # Make sure to replace the example URL with your actual DATABASE_URL - docker run -p 5000:5000 -e DATABASE_URL="postgresql://user:password@host:port/db" --rm klavis-ai/mcp-server-postgres - ``` - * `-p 5000:5000`: Maps port 5000 on your host to port 5000 in the container. - * `-e DATABASE_URL="..."`: Passes the database connection string as an environment variable *directly* to the container. Ensure this is properly quoted. - * `--rm`: Removes the container when it stops. - * `klavis-ai/mcp-server-postgres`: The name of the image you built. +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. - The server should now be running and accessible at `http://localhost:5000`. +## šŸ“œ License -### Using NPM (Manual / Development) +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. -1. **Navigate to the Server Directory:** - ```bash - cd mcp_servers/postgres - ``` -2. **Install Dependencies:** - ```bash - npm install - ``` -3. **Ensure `.env` is Configured:** Make sure you have created and configured the `.env` file as described in the Setup section. The `npm start` script will automatically load it using `dotenv`. -4. **Start the Server:** - ```bash - npm start - ``` - This command first compiles the TypeScript code (`tsc`) and then runs the server (`node dist/index.js`). +--- - The server should now be running and accessible at `http://localhost:5000`. - -## Usage with MCP Clients - -Once the server is running, you can connect to it using any MCP-compatible client. The specific configuration will depend on the client, but it generally involves pointing the client to the server's address (e.g., `http://localhost:5000` if running locally). The client can then list available resources/tools and interact with the `query` tool or read schema resources. \ No newline at end of file +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/postgres/index.ts b/mcp_servers/postgres/index.ts index cabbf80e..dd70d027 100755 --- a/mcp_servers/postgres/index.ts +++ b/mcp_servers/postgres/index.ts @@ -1,7 +1,8 @@ #!/usr/bin/env node -import express from "express"; +import express, { Request, Response } from 'express'; import { Server } from "@modelcontextprotocol/sdk/server/index.js"; import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js"; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; import { CallToolRequestSchema, ListResourcesRequestSchema, @@ -15,133 +16,236 @@ import dotenv from 'dotenv'; // Load environment variables dotenv.config(); -const server = new Server( - { - name: "klavis-ai/postgres", - version: "0.1.0", - }, - { - capabilities: { - resources: {}, - tools: {}, - }, - }, -); - const SCHEMA_PATH = "schema"; -server.setRequestHandler(ListResourcesRequestSchema, async () => { - const client = await getPool().connect(); - try { - const result = await client.query( - "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'", - ); - return { - resources: result.rows.map((row) => ({ - uri: new URL(`${row.table_name}/${SCHEMA_PATH}`, getResourceBaseUrl()).href, - mimeType: "application/json", - name: `"${row.table_name}" database schema`, - })), - }; - } finally { - client.release(); - } -}); +const getPostgresMcpServer = () => { + const server = new Server( + { + name: "klavis-ai/postgres", + version: "0.1.0", + }, + { + capabilities: { + resources: {}, + tools: {}, + }, + }, + ); + server.setRequestHandler(ListResourcesRequestSchema, async () => { + const client = await getPool().connect(); + try { + const result = await client.query( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'", + ); + return { + resources: result.rows.map((row) => ({ + uri: new URL(`${row.table_name}/${SCHEMA_PATH}`, getResourceBaseUrl()).href, + mimeType: "application/json", + name: `"${row.table_name}" database schema`, + })), + }; + } finally { + client.release(); + } + }); -server.setRequestHandler(ReadResourceRequestSchema, async (request) => { - const resourceUrl = new URL(request.params.uri); + server.setRequestHandler(ReadResourceRequestSchema, async (request) => { + const resourceUrl = new URL(request.params.uri); - const pathComponents = resourceUrl.pathname.split("/"); - const schema = pathComponents.pop(); - const tableName = pathComponents.pop(); + const pathComponents = resourceUrl.pathname.split("/"); + const schema = pathComponents.pop(); + const tableName = pathComponents.pop(); - if (schema !== SCHEMA_PATH) { - throw new Error("Invalid resource URI"); - } + if (schema !== SCHEMA_PATH) { + throw new Error("Invalid resource URI"); + } - const client = await getPool().connect(); - try { - const result = await client.query( - "SELECT column_name, data_type FROM information_schema.columns WHERE table_name = $1", - [tableName], - ); + const client = await getPool().connect(); + try { + const result = await client.query( + "SELECT column_name, data_type FROM information_schema.columns WHERE table_name = $1", + [tableName], + ); + return { + contents: [ + { + uri: request.params.uri, + mimeType: "application/json", + text: JSON.stringify(result.rows, null, 2), + }, + ], + }; + } finally { + client.release(); + } + }); + + server.setRequestHandler(ListToolsRequestSchema, async () => { return { - contents: [ + tools: [ { - uri: request.params.uri, - mimeType: "application/json", - text: JSON.stringify(result.rows, null, 2), + name: "query", + description: "Run a read-only SQL query", + inputSchema: { + type: "object", + properties: { + sql: { type: "string" }, + }, + }, + annotations: { + category: "POSTGRES_QUERY", + readOnlyHint: true, + }, }, ], }; - } finally { - client.release(); - } -}); - -server.setRequestHandler(ListToolsRequestSchema, async () => { - return { - tools: [ - { - name: "query", - description: "Run a read-only SQL query", - inputSchema: { - type: "object", - properties: { - sql: { type: "string" }, - }, - }, - }, - ], - }; -}); + }); -server.setRequestHandler(CallToolRequestSchema, async (request) => { - if (request.params.name === "query") { - const sql = request.params.arguments?.sql as string; + server.setRequestHandler(CallToolRequestSchema, async (request) => { + if (request.params.name === "query") { + const sql = request.params.arguments?.sql as string; - const client = await getPool().connect(); - try { - await client.query("BEGIN TRANSACTION READ ONLY"); - const result = await client.query(sql); - return { - content: [{ type: "text", text: JSON.stringify(result.rows, null, 2) }], - isError: false, - }; - } catch (error) { - throw error; - } finally { - client - .query("ROLLBACK") - .catch((error) => - console.warn("Could not roll back transaction:", error), - ); + const client = await getPool().connect(); + try { + await client.query("BEGIN TRANSACTION READ ONLY"); + const result = await client.query(sql); + return { + content: [{ type: "text", text: JSON.stringify(result.rows, null, 2) }], + isError: false, + }; + } catch (error) { + throw error; + } finally { + client + .query("ROLLBACK") + .catch((error) => + console.warn("Could not roll back transaction:", error), + ); - client.release(); + client.release(); + } } - } - throw new Error(`Unknown tool: ${request.params.name}`); -}); - -const app = express(); + throw new Error(`Unknown tool: ${request.params.name}`); + }); -const transports = new Map(); + return server; +} -// Create AsyncLocalStorage for request context +// Create AsyncLocalStorage for request context - now only storing databaseUrl const asyncLocalStorage = new AsyncLocalStorage<{ - resourceBaseUrl: URL; - pool: pg.Pool; + databaseUrl: string; }>(); +// Create resourceBaseUrl when needed function getResourceBaseUrl() { - return asyncLocalStorage.getStore()!.resourceBaseUrl; + const databaseUrl = asyncLocalStorage.getStore()!.databaseUrl; + const resourceBaseUrl = new URL(databaseUrl); + resourceBaseUrl.protocol = "postgres:"; + resourceBaseUrl.password = ""; + return resourceBaseUrl; } +// Create pool when needed function getPool() { - return asyncLocalStorage.getStore()!.pool; + const databaseUrl = asyncLocalStorage.getStore()!.databaseUrl; + return new pg.Pool({ + connectionString: databaseUrl, + }); +} + +function extractApiKey(req: Request): string { + let authData = process.env.API_KEY; + + if (authData) { + return authData; + } + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: Postgres database URL is missing. Provide it via API_KEY env var or x-auth-data header with token field.'); + return ''; + } + + const authDataJson = JSON.parse(authData); + return authDataJson.token ?? authDataJson.api_key ?? ''; } +const app = express(); + + +//============================================================================= +// STREAMABLE HTTP TRANSPORT (PROTOCOL VERSION 2025-03-26) +//============================================================================= + +app.post('/mcp', async (req: Request, res: Response) => { + const databaseUrl = extractApiKey(req); + + const server = getPostgresMcpServer(); + try { + const transport: StreamableHTTPServerTransport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + await server.connect(transport); + asyncLocalStorage.run({ databaseUrl }, async () => { + await transport.handleRequest(req, res, req.body); + }); + res.on('close', () => { + console.log('Request closed'); + transport.close(); + server.close(); + }); + } catch (error) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', + }, + id: null, + }); + } + } +}); + +app.get('/mcp', async (req: Request, res: Response) => { + console.log('Received GET MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +app.delete('/mcp', async (req: Request, res: Response) => { + console.log('Received DELETE MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +//============================================================================= +// DEPRECATED HTTP+SSE TRANSPORT (PROTOCOL VERSION 2024-11-05) +//============================================================================= +const transports = new Map(); + app.get("/sse", async (req, res) => { const transport = new SSEServerTransport(`/messages`, res); @@ -156,6 +260,7 @@ app.get("/sse", async (req, res) => { transports.set(transport.sessionId, transport); + const server = getPostgresMcpServer(); await server.connect(transport); console.log(`SSE connection established with transport: ${transport.sessionId}`); @@ -167,23 +272,9 @@ app.post("/messages", async (req, res) => { let transport: SSEServerTransport | undefined; transport = sessionId ? transports.get(sessionId) : undefined; if (transport) { - // Use DATABASE_URL from environment if available, otherwise fall back to header - const databaseUrl = process.env.DATABASE_URL || req.headers['x-auth-token'] as string; - - if (!databaseUrl) { - console.error('No database URL provided in environment or headers'); - res.status(400).send({ error: "No database URL provided" }); - return; - } + const databaseUrl = extractApiKey(req); - const resourceBaseUrl = new URL(databaseUrl); - resourceBaseUrl.protocol = "postgres:"; - resourceBaseUrl.password = ""; - - const pool = new pg.Pool({ - connectionString: databaseUrl, - }); - asyncLocalStorage.run({ resourceBaseUrl, pool }, async () => { + asyncLocalStorage.run({ databaseUrl }, async () => { await transport.handlePostMessage(req, res); }); } else { diff --git a/mcp_servers/postgres/package-lock.json b/mcp_servers/postgres/package-lock.json index addd5e25..cf3db7b4 100644 --- a/mcp_servers/postgres/package-lock.json +++ b/mcp_servers/postgres/package-lock.json @@ -1,15 +1,16 @@ { - "name": "@modelcontextprotocol/server-postgres", + "name": "@klavis-ai/mcp-server-postgres", "version": "0.6.2", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "@modelcontextprotocol/server-postgres", + "name": "@klavis-ai/mcp-server-postgres", "version": "0.6.2", "license": "MIT", "dependencies": { - "@modelcontextprotocol/sdk": "1.8.0", + "@modelcontextprotocol/sdk": "^1.12.1", + "dotenv": "^16.4.5", "express": "^4.21.2", "pg": "^8.13.0" }, @@ -25,18 +26,19 @@ } }, "node_modules/@modelcontextprotocol/sdk": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.8.0.tgz", - "integrity": "sha512-e06W7SwrontJDHwCawNO5SGxG+nU9AAx+jpHHZqGl/WrDBdWOpvirC+s58VpJTB5QemI4jTRcjWT4Pt3Q1NPQQ==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.13.1.tgz", + "integrity": "sha512-8q6+9aF0yA39/qWT/uaIj6zTpC+Qu07DnN/lb9mjoquCJsAh6l3HyYqc9O3t2j7GilseOQOQimLg7W3By6jqvg==", "license": "MIT", "dependencies": { + "ajv": "^6.12.6", "content-type": "^1.0.5", "cors": "^2.8.5", - "cross-spawn": "^7.0.3", + "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", - "pkce-challenge": "^4.1.0", + "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" @@ -439,6 +441,22 @@ "node": ">= 0.6" } }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, "node_modules/array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", @@ -650,6 +668,18 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/dotenv": { + "version": "16.5.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.5.0.tgz", + "integrity": "sha512-m/C+AwOAr9/W1UOIZUo232ejMNnJAJtYQjUbHoNTBNTJSvqzzDh7vnrei3o3r3m9blf6ZoDkvcw0VmozNRFJxg==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -806,6 +836,18 @@ "express": "^4.11 || 5 || ^5.0.0-beta.1" } }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, "node_modules/finalhandler": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", @@ -1046,6 +1088,12 @@ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "license": "ISC" }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -1401,9 +1449,9 @@ } }, "node_modules/pkce-challenge": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-4.1.0.tgz", - "integrity": "sha512-ZBmhE1C9LcPoH9XZSdwiPtbPHZROwAnMy+kIFQVrnMCxY4Cudlz3gBOpzilgc0jOgRaiT3sIWfpMomW2ar2orQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", "license": "MIT", "engines": { "node": ">=16.20.0" @@ -1472,6 +1520,15 @@ "node": ">= 0.10" } }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/qs": { "version": "6.13.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", @@ -1889,6 +1946,15 @@ "node": ">= 0.8" } }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", diff --git a/mcp_servers/postgres/package.json b/mcp_servers/postgres/package.json index 85efd41a..15bb2c76 100755 --- a/mcp_servers/postgres/package.json +++ b/mcp_servers/postgres/package.json @@ -19,7 +19,7 @@ "watch": "tsc --watch" }, "dependencies": { - "@modelcontextprotocol/sdk": "1.8.0", + "@modelcontextprotocol/sdk": "^1.12.1", "dotenv": "^16.4.5", "pg": "^8.13.0", "express": "^4.21.2" diff --git a/mcp_servers/quickbooks/.env.example b/mcp_servers/quickbooks/.env.example new file mode 100644 index 00000000..a1817f79 --- /dev/null +++ b/mcp_servers/quickbooks/.env.example @@ -0,0 +1,11 @@ +# QuickBooks MCP Server Environment Variables + +# Server Configuration +QB_MCP_SERVER_PORT=5000 + +# QuickBooks OAuth Credentials +QB_ACCESS_TOKEN=your_access_token_here +QB_REALM_ID=your_company_realm_id_here + +# Environment: 'sandbox' for testing, 'production' for live data +QB_ENVIRONMENT=sandbox \ No newline at end of file diff --git a/mcp_servers/quickbooks/Dockerfile b/mcp_servers/quickbooks/Dockerfile new file mode 100644 index 00000000..bb8d5653 --- /dev/null +++ b/mcp_servers/quickbooks/Dockerfile @@ -0,0 +1,18 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/quickbooks/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/quickbooks/server.py . +COPY mcp_servers/quickbooks/errors.py . +COPY mcp_servers/quickbooks/session_manager.py . +COPY mcp_servers/quickbooks/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/quickbooks/README.md b/mcp_servers/quickbooks/README.md new file mode 100644 index 00000000..595378d7 --- /dev/null +++ b/mcp_servers/quickbooks/README.md @@ -0,0 +1,78 @@ +# QuickBooks MCP Server + +A Model Context Protocol (MCP) server for QuickBooks integration. Manage accounting data, invoices, and financial transactions using QuickBooks API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to QuickBooks with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("QUICKBOOKS", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/quickbooks-mcp-server:latest + + +# Run QuickBooks MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/quickbooks-mcp-server:latest + + +# Run QuickBooks MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_quickbooks_access_token_here"}' \ + ghcr.io/klavis-ai/quickbooks-mcp-server:latest +``` + +**OAuth Setup:** QuickBooks requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Invoice Management**: Create, read, update, and send invoices +- **Customer Management**: Manage customer and vendor information +- **Financial Reporting**: Access financial reports and accounting data +- **Transaction Processing**: Handle payments and financial transactions +- **Tax Operations**: Manage tax calculations and reporting + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/quickbooks/errors.py b/mcp_servers/quickbooks/errors.py new file mode 100644 index 00000000..77ec3a0b --- /dev/null +++ b/mcp_servers/quickbooks/errors.py @@ -0,0 +1,24 @@ +import traceback + + +class QuickBooksError(Exception): + """ + A custom error wrapper that preserves the original traceback and allows custom error messages. + """ + + def __init__(self, message: str, original_exception: Exception = None): + super().__init__(message) + self.message = message + self.original_exception = original_exception + if original_exception is not None: + self.traceback = traceback.format_exc() + else: + self.traceback = None + + def __str__(self): + base = f"QuickBooksError: {self.message}" + if self.original_exception: + base += f"\nCaused by: {repr(self.original_exception)}" + if self.traceback: + base += f"\nTraceback (most recent call last):\n{self.traceback}" + return base diff --git a/mcp_servers/quickbooks/requirements.txt b/mcp_servers/quickbooks/requirements.txt new file mode 100644 index 00000000..1e3c6f1c --- /dev/null +++ b/mcp_servers/quickbooks/requirements.txt @@ -0,0 +1,5 @@ +mcp[cli]==1.11.0 +fastapi +uvicorn[standard] +starlette +httpx \ No newline at end of file diff --git a/mcp_servers/quickbooks/server.py b/mcp_servers/quickbooks/server.py new file mode 100644 index 00000000..b8b909aa --- /dev/null +++ b/mcp_servers/quickbooks/server.py @@ -0,0 +1,281 @@ +#!/usr/bin/env python3 +""" +QuickBooks MCP Server with SSE and Streamable HTTP Transport + +This server provides MCP tools for interacting with QuickBooks APIs. +Supports both Server-Sent Events (SSE) and Streamable HTTP transport modes. +""" + +import os +import logging +import contextlib +import json +from collections.abc import AsyncIterator +from typing import Any +from contextvars import ContextVar + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +from tools import accounts, invoices, customers, payments, vendors +from session_manager import SessionManager + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("quickbooks-mcp-server") + +# Environment configuration +QB_MCP_SERVER_PORT = int(os.getenv("QB_MCP_SERVER_PORT", "5000")) + +# Context variable to store QB credentials for the current request +qb_credentials_context: ContextVar[dict] = ContextVar( + 'qb_credentials', default=None) + +# Initialize session manager +session_manager_instance = SessionManager() + +# Initialize the MCP server +server = Server("quickbooks-mcp-server") + + +@server.list_tools() +async def list_tools() -> list[types.Tool]: + """List available QuickBooks tools.""" + tool_list = [*accounts.tools, *invoices.tools, * + customers.tools, *payments.tools, *vendors.tools] + logger.debug(f"Available tools: {[tool.name for tool in tool_list]}") + return tool_list + + +@server.call_tool() +async def call_tool(name: str, arguments: dict[str, Any]) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + """Execute a specific QuickBooks tool.""" + logger.debug(f"Calling tool: {name} with arguments: {arguments}") + + # Extract QuickBooks credentials from arguments if provided + qb_access_token = arguments.pop('qb_access_token', None) + qb_realm_id = arguments.pop('qb_realm_id', None) + qb_environment = arguments.pop('qb_environment', None) + + # If no credentials in arguments, try to get from context (headers) + if not any([qb_access_token, qb_realm_id, qb_environment]): + context_credentials = qb_credentials_context.get() + if context_credentials: + qb_access_token = context_credentials.get('access_token') + qb_realm_id = context_credentials.get('realm_id') + qb_environment = context_credentials.get('environment') + logger.debug("Using QB credentials from request headers") + + try: + # Get session for this request + session = session_manager_instance.get_session( + qb_access_token, qb_realm_id, qb_environment) + except ValueError as e: + return [types.TextContent( + type="text", + text=f"Configuration error: {str(e)}. Please provide qb_access_token and qb_realm_id in the request arguments, or set QB_ACCESS_TOKEN and QB_REALM_ID environment variables, or provide credentials via HTTP headers (x-qb-access-token, x-qb-realm-id, x-qb-environment)." + )] + + # Map tools to session managers + tool_map = { + "quickbooks_list_accounts": session.account_manager.list_accounts, + "quickbooks_get_account": session.account_manager.get_account, + "quickbooks_create_account": session.account_manager.create_account, + "quickbooks_search_accounts": session.account_manager.search_accounts, + "quickbooks_update_account": session.account_manager.update_account, + "quickbooks_create_invoice": session.invoice_manager.create_invoice, + "quickbooks_get_invoice": session.invoice_manager.get_invoice, + "quickbooks_list_invoices": session.invoice_manager.list_invoices, + "quickbooks_update_invoice": session.invoice_manager.update_invoice, + "quickbooks_delete_invoice": session.invoice_manager.delete_invoice, + "quickbooks_send_invoice": session.invoice_manager.send_invoice, + "quickbooks_void_invoice": session.invoice_manager.void_invoice, + "quickbooks_search_invoices": session.invoice_manager.search_invoices, + "quickbooks_create_customer": session.customer_manager.create_customer, + "quickbooks_get_customer": session.customer_manager.get_customer, + "quickbooks_list_customers": session.customer_manager.list_customers, + "quickbooks_search_customers": session.customer_manager.search_customers, + "quickbooks_update_customer": session.customer_manager.update_customer, + "quickbooks_deactivate_customer": session.customer_manager.deactivate_customer, + "quickbooks_activate_customer": session.customer_manager.activate_customer, + "quickbooks_create_payment": session.payment_manager.create_payment, + "quickbooks_get_payment": session.payment_manager.get_payment, + "quickbooks_list_payments": session.payment_manager.list_payments, + "quickbooks_update_payment": session.payment_manager.update_payment, + "quickbooks_delete_payment": session.payment_manager.delete_payment, + "quickbooks_send_payment": session.payment_manager.send_payment, + "quickbooks_void_payment": session.payment_manager.void_payment, + "quickbooks_search_payments": session.payment_manager.search_payments, + "quickbooks_create_vendor": session.vendor_manager.create_vendor, + "quickbooks_get_vendor": session.vendor_manager.get_vendor, + "quickbooks_list_vendors": session.vendor_manager.list_vendors, + "quickbooks_update_vendor": session.vendor_manager.update_vendor, + "quickbooks_activate_vendor": session.vendor_manager.activate_vendor, + "quickbooks_deactivate_vendor": session.vendor_manager.deactivate_vendor, + "quickbooks_search_vendors": session.vendor_manager.search_vendors, + } + + if name not in tool_map: + return [types.TextContent( + type="text", + text=f"Unknown tool: {name}" + )] + + try: + result = await tool_map[name](**arguments) + if name in [ + "quickbooks_create_account", "quickbooks_get_account", "quickbooks_update_account", + "quickbooks_create_customer", "quickbooks_get_customer", "quickbooks_update_customer", "quickbooks_deactivate_customer", "quickbooks_activate_customer", + "quickbooks_create_payment", "quickbooks_get_payment", "quickbooks_update_payment", "quickbooks_delete_payment", "quickbooks_send_payment", "quickbooks_void_payment", + "quickbooks_create_vendor", "quickbooks_get_vendor", "quickbooks_update_vendor", "quickbooks_activate_vendor", "quickbooks_deactivate_vendor" + ]: + if isinstance(result, dict): + return [types.TextContent( + type="text", + text="\n".join(f"{k}: {v}" for k, v in result.items()) + )] + elif name in [ + "quickbooks_list_accounts", "quickbooks_search_accounts", "quickbooks_list_invoices", "quickbooks_search_invoices", + "quickbooks_list_customers", "quickbooks_search_customers", "quickbooks_list_payments", "quickbooks_search_payments", "quickbooks_list_vendors", "quickbooks_search_vendors" + ]: + # Handle list results + if isinstance(result, list): + if not result: + return [types.TextContent(type="text", text="No results found.")] + return [types.TextContent(type="text", text=str(result))] + return [types.TextContent(type="text", text=str(result))] + except Exception as e: + import traceback + logger.error( + f"Error executing tool {name}: {e.message if hasattr(e, 'message') else str(e)}") + logger.error(traceback.format_exc()) + return [types.TextContent( + type="text", + text=f"Error executing tool {name}: {e.message if hasattr(e, 'message') else str(e)}" + )] + + +@click.command() +@click.option("--port", default=QB_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + """Start the QuickBooks MCP server with SSE and Streamable HTTP support.""" + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + """Handle SSE connections with QuickBooks credential extraction.""" + logger.info("Handling SSE connection") + + # Extract QB credentials from headers + qb_access_token, qb_realm_id, qb_environment = session_manager_instance.extract_credentials_from_headers( + request) + + # Store credentials in scope for later use + if any([qb_access_token, qb_realm_id, qb_environment]): + credentials = { + 'access_token': qb_access_token, + 'realm_id': qb_realm_id, + 'environment': qb_environment + } + request.scope['qb_credentials'] = credentials + qb_credentials_context.set(credentials) + logger.info( + f"QB credentials extracted from SSE headers for realm: {qb_realm_id or 'env'}") + + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await server.run( + streams[0], streams[1], server.create_initialization_options() + ) + return Response() + + session_manager = StreamableHTTPSessionManager( + app=server, + event_store=None, # Stateless mode + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + """Handle Streamable HTTP requests with QuickBooks credential extraction.""" + logger.info("Handling StreamableHTTP request") + + # Extract QB credentials from headers + qb_access_token, qb_realm_id, qb_environment = session_manager_instance.extract_credentials_from_headers( + scope) + + # Store credentials in scope for later use + if any([qb_access_token, qb_realm_id, qb_environment]): + credentials = { + 'access_token': qb_access_token, + 'realm_id': qb_realm_id, + 'environment': qb_environment + } + scope['qb_credentials'] = credentials + qb_credentials_context.set(credentials) + logger.info( + f"QB credentials extracted from headers for realm: {qb_realm_id or 'env'}") + + await session_manager.handle_request(scope, receive, send) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + await session_manager_instance.cleanup() + logger.info("Application shutting down...") + + starlette_app = Starlette( + debug=True, + routes=[ + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + +if __name__ == "__main__": + main() diff --git a/mcp_servers/quickbooks/session_manager.py b/mcp_servers/quickbooks/session_manager.py new file mode 100644 index 00000000..8a0a681c --- /dev/null +++ b/mcp_servers/quickbooks/session_manager.py @@ -0,0 +1,133 @@ +""" +Session-based QuickBooks configuration management for MCP server. +Allows clients to provide QB credentials via headers or initialization. +""" + +import json +import logging +from typing import Dict, Any, Optional, Tuple +import os +import base64 + +from tools.http_client import QuickBooksHTTPClient +from tools.accounts import AccountManager +from tools.invoices import InvoiceManager +from tools.customers import CustomerManager +from tools.payments import PaymentManager +from tools.vendors import VendorManager + +logger = logging.getLogger(__name__) + + +class QuickBooksSession: + """Represents a QuickBooks session with specific credentials.""" + + def __init__(self, access_token: str = None, realm_id: str = None, environment: str = None): + self.client = QuickBooksHTTPClient( + access_token=access_token, + company_id=realm_id, + environment=environment + ) + + if not self.client.is_configured(): + raise ValueError("QuickBooks session not properly configured") + + # Initialize managers + self.account_manager = AccountManager(self.client) + self.invoice_manager = InvoiceManager(self.client) + self.customer_manager = CustomerManager(self.client) + self.payment_manager = PaymentManager(self.client) + self.vendor_manager = VendorManager(self.client) + + logger.info(f"QuickBooks session created for realm: {realm_id or 'env'}") + + async def close(self): + """Close the session and cleanup resources.""" + await self.client.close() + + +class SessionManager: + """Manages QuickBooks sessions and routes requests to appropriate sessions.""" + + def __init__(self): + self.sessions: Dict[str, QuickBooksSession] = {} + self.default_session: Optional[QuickBooksSession] = None + + # Try to create a default session from environment variables + try: + self.default_session = QuickBooksSession() + logger.info("Default QuickBooks session created from environment variables") + except ValueError: + logger.warning("No default QuickBooks session available. Clients must provide credentials.") + + def create_session_key(self, access_token: str = None, realm_id: str = None, environment: str = None) -> str: + """Create a unique key for session caching.""" + return f"{access_token or 'env'}_{realm_id or 'env'}_{environment or 'env'}" + + def get_session(self, access_token: str = None, realm_id: str = None, environment: str = None) -> QuickBooksSession: + """Get or create a session for the given credentials.""" + # If no credentials provided, use default session + if not any([access_token, realm_id, environment]): + if self.default_session: + return self.default_session + raise ValueError("No credentials provided and no default session available") + + # Check cache + session_key = self.create_session_key(access_token, realm_id, environment) + if session_key in self.sessions: + return self.sessions[session_key] + + # Create new session + try: + session = QuickBooksSession(access_token, realm_id, environment) + self.sessions[session_key] = session + return session + except ValueError as e: + raise ValueError(f"Failed to create QuickBooks session: {str(e)}") + + def extract_credentials_from_headers(self, request_or_scope) -> Tuple[str, str, str]: + """Extract QuickBooks credentials from request headers. + + Returns: + tuple: (access_token, realm_id, environment) + """ + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Get headers based on input type + if hasattr(request_or_scope, 'headers'): + # SSE request object + header_value = request_or_scope.headers.get(b'x-auth-data') + if header_value: + auth_data = base64.b64decode(header_value).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + header_value = headers.get(b'x-auth-data') + if header_value: + auth_data = base64.b64decode(header_value).decode('utf-8') + + if not auth_data: + return "", "", "" + + try: + auth_json = json.loads(auth_data) + return ( + auth_json.get('access_token', ''), + auth_json.get('realm_id', ''), + auth_json.get('environment', '') + ) + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "", "", "" + + async def cleanup(self): + """Cleanup all sessions.""" + if self.default_session: + await self.default_session.close() + + for session in self.sessions.values(): + await session.close() + + self.sessions.clear() + logger.info("All QuickBooks sessions cleaned up") diff --git a/mcp_servers/quickbooks/tools/__init__.py b/mcp_servers/quickbooks/tools/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/mcp_servers/quickbooks/tools/accounts.py b/mcp_servers/quickbooks/tools/accounts.py new file mode 100644 index 00000000..66220d8b --- /dev/null +++ b/mcp_servers/quickbooks/tools/accounts.py @@ -0,0 +1,390 @@ +from typing import Any, Dict, List + +from mcp.types import Tool +import mcp.types as types +from .http_client import QuickBooksHTTPClient + +# Minimal properties for account creation (required by QuickBooks) +account_properties_minimal = { + "Name": { + "type": "string", + "description": "Account name (required, must be unique)" + }, + "AccountType": { + "type": "string", + "description": "Account type classification. Valid values: Bank, Other Current Asset, Fixed Asset, Other Asset, Accounts Receivable, Equity, Expense, Other Expense, Cost of Goods Sold, Accounts Payable, Credit Card, Long Term Liability, Other Current Liability, Income, Other Income." + } +} + +# Account properties mapping (based on QuickBooks API documentation) +account_properties_user_define = { + **account_properties_minimal, + "Description": { + "type": "string", + "description": "User entered description for the account, which may include user entered information to guide bookkeepers/accountants in deciding what journal entries to post to the account" + }, + "Active": { + "type": "boolean", + "description": "Whether or not the account is active. Inactive accounts may be hidden from most display purposes and may not be posted to" + } +} + +account_properties = { + **account_properties_user_define, + "Id": { + "type": "string", + "description": "The unique QuickBooks account ID" + } +} + +# MCP Tool definitions +create_account_tool = Tool( + name="quickbooks_create_account", + title="Create Account", + description="Create a new account in QuickBooks", + inputSchema={ + "type": "object", + "properties": account_properties_minimal, + "required": ["Name"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_ACCOUNT"}) +) + +get_account_tool = Tool( + name="quickbooks_get_account", + title="Get Account", + description="Get a specific account by ID from QuickBooks", + inputSchema={ + "type": "object", + "properties": { + "Id": {"type": "string", "description": "The QuickBooks account ID"} + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_ACCOUNT", "readOnlyHint": True}) +) + +list_accounts_tool = Tool( + name="quickbooks_list_accounts", + title="List Accounts", + description="List all chart of accounts from QuickBooks", + inputSchema={ + "type": "object", + "properties": { + "MaxResults": {"type": "integer", "description": "Maximum number of results to return", "default": 100}, + "AccountType": {"type": "string", "description": "Filter by account type: Asset, Liability, Income, Expense, Equity"}, + "ActiveOnly": {"type": "boolean", "description": "Return only active accounts", "default": True} + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_ACCOUNT", "readOnlyHint": True}) +) + +update_account_tool = Tool( + name="quickbooks_update_account", + title="Update Account", + description="Update an existing account in QuickBooks", + inputSchema={ + "type": "object", + "properties": account_properties, + "required": ["Id", "Name"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_ACCOUNT"}) +) + +search_accounts_tool = Tool( + name="quickbooks_search_accounts", + title="Search Accounts", + description="Advanced Account Search - Search accounts with powerful filters including name, type, classification, status, and other criteria. Perfect for finding specific accounts based on various parameters", + inputSchema={ + "type": "object", + "properties": { + "Name": {"type": "string", "description": "Search by account name (partial match)"}, + "AccountType": {"type": "string", "description": "Filter by account type: Bank, Other Current Asset, Fixed Asset, Other Asset, Accounts Receivable, Equity, Expense, Other Expense, Cost of Goods Sold, Accounts Payable, Credit Card, Long Term Liability, Other Current Liability, Income, Other Income"}, + "Classification": {"type": "string", "description": "Filter by classification: Asset, Liability, Income, Expense, Equity"}, + "Active": {"type": "boolean", "description": "Filter by active status"}, + "FullyQualifiedName": {"type": "string", "description": "Search by fully qualified name (partial match)"}, + "Description": {"type": "string", "description": "Search by description (partial match)"}, + + # Currency filters + "CurrencyRefValue": {"type": "string", "description": "Filter by currency code"}, + "CurrencyRefName": {"type": "string", "description": "Search by currency name (partial match)"}, + + # Date filters + "CreateTimeFrom": {"type": "string", "description": "Search accounts created from this date (YYYY-MM-DD format)"}, + "CreateTimeTo": {"type": "string", "description": "Search accounts created to this date (YYYY-MM-DD format)"}, + "LastUpdatedTimeFrom": {"type": "string", "description": "Search accounts last updated from this date (YYYY-MM-DD format)"}, + "LastUpdatedTimeTo": {"type": "string", "description": "Search accounts last updated to this date (YYYY-MM-DD format)"}, + + # Pagination + "MaxResults": {"type": "integer", "description": "Maximum number of results to return", "default": 100}, + "StartPosition": {"type": "integer", "description": "Starting position for pagination (1-based)", "default": 1} + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_ACCOUNT", "readOnlyHint": True}) +) + + +def mcp_object_to_account_data(**kwargs) -> Dict[str, Any]: + """ + Convert MCP object format to QuickBooks account data format. + This function transforms the flat MCP structure to the nested format expected by QuickBooks API. + """ + account_data = {} + + # Basic account information - direct copy + for field in ['Name', 'AccountType', 'Description', 'Active']: + if field in kwargs: + account_data[field] = kwargs[field] + + return account_data + + +def account_data_to_mcp_object(account_data: Dict[str, Any]) -> Dict[str, Any]: + """ + Convert QuickBooks account data format to MCP object format. + This function flattens the nested QuickBooks structure to the flat format expected by MCP tools. + """ + mcp_object = {} + + # Copy basic fields if present + field_mappings = { + 'Id': 'Id', + 'Name': 'Name', + 'AccountType': 'AccountType', + 'Description': 'Description', + 'Active': 'Active', + 'Classification': 'Classification', + 'FullyQualifiedName': 'FullyQualifiedName', + 'CurrentBalance': 'CurrentBalance' + } + + for qb_field, mcp_field in field_mappings.items(): + if qb_field in account_data: + mcp_object[mcp_field] = account_data[qb_field] + + # Currency reference + if 'CurrencyRef' in account_data and isinstance(account_data['CurrencyRef'], dict): + if 'value' in account_data['CurrencyRef']: + mcp_object['CurrencyRefValue'] = account_data['CurrencyRef']['value'] + if 'name' in account_data['CurrencyRef']: + mcp_object['CurrencyRefName'] = account_data['CurrencyRef']['name'] + + # MetaData fields + if 'MetaData' in account_data and isinstance(account_data['MetaData'], dict): + metadata = account_data['MetaData'] + if 'CreateTime' in metadata: + mcp_object['CreateTime'] = metadata['CreateTime'] + if 'LastUpdatedTime' in metadata: + mcp_object['LastUpdatedTime'] = metadata['LastUpdatedTime'] + + # SyncToken + if 'SyncToken' in account_data: + mcp_object['SyncToken'] = account_data['SyncToken'] + + return mcp_object + + +class AccountManager: + def __init__(self, client: QuickBooksHTTPClient): + self.client = client + + async def create_account(self, **kwargs) -> Dict[str, Any]: + """Create a new account with comprehensive property support.""" + account_data = mcp_object_to_account_data(**kwargs) + + response = await self.client._post('account', account_data) + return account_data_to_mcp_object(response['Account']) + + async def get_account(self, Id: str) -> Dict[str, Any]: + """Get a specific account by ID.""" + response = await self.client._get(f"account/{Id}") + return account_data_to_mcp_object(response['Account']) + + async def list_accounts(self, MaxResults: int = 100, AccountType: str = None, ActiveOnly: bool = True) -> List[Dict[str, Any]]: + """List all accounts with comprehensive properties and pagination support.""" + query = "SELECT * FROM Account" + + conditions = [] + if ActiveOnly: + conditions.append("Active = true") + if AccountType: + conditions.append(f"Classification = '{AccountType}'") + + if conditions: + query += " WHERE " + " AND ".join(conditions) + + query += f" MAXRESULTS {MaxResults}" + + response = await self.client._get('query', params={'query': query}) + + # Handle case when no accounts are returned + if 'Account' not in response['QueryResponse']: + return [] + + accounts = response['QueryResponse']['Account'] + return [account_data_to_mcp_object(account) for account in accounts] + + async def update_account(self, **kwargs) -> Dict[str, Any]: + """Update an existing account with comprehensive property support.""" + account_id = kwargs.get('Id') + if not account_id: + raise ValueError("Id is required for updating an account") + + # Auto-fetch current sync token + current_account_response = await self.client._get(f"account/{account_id}") + sync_token = current_account_response.get( + 'Account', {}).get('SyncToken', '0') + + account_data = mcp_object_to_account_data(**kwargs) + account_data.update({ + "Id": account_id, + "SyncToken": sync_token, + "sparse": True, + }) + + response = await self.client._post('account', account_data) + return account_data_to_mcp_object(response['Account']) + + async def search_accounts(self, **kwargs) -> List[Dict[str, Any]]: + """ + Search accounts with various filters and pagination support. + + Args: + Name: Search by account name (partial match) + AccountType: Filter by account type + Classification: Filter by classification (Asset, Liability, Income, Expense, Equity) + Active: Filter by active status + FullyQualifiedName: Search by fully qualified name (partial match) + Description: Search by description (partial match) + + # Balance filters + MinCurrentBalance/MaxCurrentBalance: Filter by current balance range + + # Currency filters + CurrencyRefValue: Filter by currency code + CurrencyRefName: Search by currency name (partial match) + + # Date filters + CreateTimeFrom/CreateTimeTo: Filter by creation date range + LastUpdatedTimeFrom/LastUpdatedTimeTo: Filter by last updated date range + + MaxResults: Maximum number of results to return (default: 100) + StartPosition: Starting position for pagination (default: 1) + + Returns: + List of accounts matching the search criteria + """ + # Build WHERE clause conditions + conditions = [] + + # Basic filters + if kwargs.get('AccountType'): + conditions.append(f"AccountType = '{kwargs['AccountType']}'") + + if kwargs.get('Classification'): + conditions.append(f"Classification = '{kwargs['Classification']}'") + + if kwargs.get('Active') is not None: + conditions.append(f"Active = {str(kwargs['Active']).lower()}") + + if kwargs.get('CurrencyRefValue'): + conditions.append( + f"CurrencyRef.value = '{kwargs['CurrencyRefValue']}'") + + # Name searches (partial match) - we'll need to post-filter these due to QB API limitations + partial_match_filters = {} + + if kwargs.get('Name'): + partial_match_filters['Name'] = kwargs['Name'].lower() + + if kwargs.get('FullyQualifiedName'): + partial_match_filters['FullyQualifiedName'] = kwargs['FullyQualifiedName'].lower() + + if kwargs.get('Description'): + partial_match_filters['Description'] = kwargs['Description'].lower() + + if kwargs.get('CurrencyRefName'): + partial_match_filters['CurrencyRefName'] = kwargs['CurrencyRefName'].lower() + + # Balance range filters + if kwargs.get('MinCurrentBalance') is not None: + conditions.append( + f"CurrentBalance >= {kwargs['MinCurrentBalance']}") + if kwargs.get('MaxCurrentBalance') is not None: + conditions.append( + f"CurrentBalance <= {kwargs['MaxCurrentBalance']}") + + # Date range filters + if kwargs.get('CreateTimeFrom'): + conditions.append( + f"MetaData.CreateTime >= '{kwargs['CreateTimeFrom']}'") + if kwargs.get('CreateTimeTo'): + conditions.append( + f"MetaData.CreateTime <= '{kwargs['CreateTimeTo']}'") + + if kwargs.get('LastUpdatedTimeFrom'): + conditions.append( + f"MetaData.LastUpdatedTime >= '{kwargs['LastUpdatedTimeFrom']}'") + if kwargs.get('LastUpdatedTimeTo'): + conditions.append( + f"MetaData.LastUpdatedTime <= '{kwargs['LastUpdatedTimeTo']}'") + + # Build the query + query = "SELECT * FROM Account" + + if conditions: + query += " WHERE " + " AND ".join(conditions) + + # Add pagination + max_results = kwargs.get('MaxResults', 100) + start_position = kwargs.get('StartPosition', 1) + + query += f" STARTPOSITION {start_position} MAXRESULTS {max_results}" + + response = await self.client._get('query', params={'query': query}) + + # Handle case when no accounts are returned + if 'Account' not in response['QueryResponse']: + return [] + + accounts = response['QueryResponse']['Account'] + + # Apply post-filtering for partial matches + if partial_match_filters: + filtered_accounts = [] + for account in accounts: + should_include = True + + for field, search_value in partial_match_filters.items(): + if field == 'Name' and 'Name' in account: + if search_value not in account['Name'].lower(): + should_include = False + break + elif field == 'FullyQualifiedName' and 'FullyQualifiedName' in account: + if search_value not in account['FullyQualifiedName'].lower(): + should_include = False + break + elif field == 'Description' and 'Description' in account: + if search_value not in account['Description'].lower(): + should_include = False + break + elif field == 'CurrencyRefName' and 'CurrencyRef' in account and isinstance(account['CurrencyRef'], dict): + currency_name = account['CurrencyRef'].get( + 'name', '').lower() + if search_value not in currency_name: + should_include = False + break + + if should_include: + filtered_accounts.append(account) + + accounts = filtered_accounts + + return [account_data_to_mcp_object(account) for account in accounts] + + +# Export tools +tools = [create_account_tool, get_account_tool, + list_accounts_tool, search_accounts_tool, update_account_tool] diff --git a/mcp_servers/quickbooks/tools/customers.py b/mcp_servers/quickbooks/tools/customers.py new file mode 100644 index 00000000..39d6de2c --- /dev/null +++ b/mcp_servers/quickbooks/tools/customers.py @@ -0,0 +1,847 @@ +from typing import Any, Dict, List + +from mcp.types import Tool +import mcp.types as types +from .http_client import QuickBooksHTTPClient + +customer_properties_user_define = { + "DisplayName": { + "type": "string", + "description": "The name of the person or organization as displayed. Must be unique across all Customer, Vendor, and Employee objects. Cannot be removed with sparse update." + }, + "GivenName": { + "type": "string", + "description": "Given name or first name of a person." + }, + "MiddleName": { + "type": "string", + "description": "Middle name of the person. The person can have zero or more middle names." + }, + "FamilyName": { + "type": "string", + "description": "Family name or the last name of the person." + }, + "PrimaryEmailAddr": { + "type": "string", + "description": "Primary email address." + }, + "DefaultTaxCodeRefValue": { + "type": "string", + "description": "The ID for the default tax code associated with this Customer object. Valid if Customer.Taxable is set to true; otherwise, it is ignored. If automated sales tax is enabled, the default tax code is set by the system and cannot be overridden." + }, + "DefaultTaxCodeRefName": { + "type": "string", + "description": "The name of the default tax code associated with this Customer object." + }, + "BillWithParent": { + "type": "boolean", + "description": "If true, this Customer object is billed with its parent. If false, or null the customer is not to be billed with its parent. This attribute is valid only if this entity is a Job or sub Customer." + }, + "CurrencyRefValue": { + "type": "string", + "description": "The ID for the currency in which all amounts associated with this customer are expressed. Once set, it cannot be changed." + }, + "CurrencyRefName": { + "type": "string", + "description": "The name of the currency in which all amounts associated with this customer are expressed." + }, + "PrimaryPhone": { + "type": "string", + "description": "Primary phone number." + }, + "Taxable": { + "type": "boolean", + "description": "If true, transactions for this customer are taxable." + }, + "Notes": { + "type": "string", + "description": "Free form text describing the Customer." + }, + "WebAddr": { + "type": "string", + "description": "Website address of the Customer." + }, + "CompanyName": { + "type": "string", + "description": "The name of the company associated with the person or organization." + }, + "Balance": { + "type": "number", + "description": "Specifies the open balance amount or the amount unpaid by the customer. For the create operation, this represents the opening balance for the customer. When returned in response to the query request it represents the current open balance (unpaid amount) for that customer. Write-on-create." + }, + "OpenBalanceDate": { + "type": "string", + "description": "Date of the Open Balance for the create operation. Write-on-create." + }, + "ShipAddrLine1": { + "type": "string", + "description": "First line of the shipping address." + }, + "ShipAddrLine2": { + "type": "string", + "description": "Second line of the shipping address." + }, + "ShipAddrCity": { + "type": "string", + "description": "City name for the shipping address." + }, + "ShipAddrCountrySubDivisionCode": { + "type": "string", + "description": "Region within a country for the shipping address. For example, state name for USA, province name for Canada." + }, + "ShipAddrPostalCode": { + "type": "string", + "description": "Postal code for the shipping address." + }, + "ShipAddrCountry": { + "type": "string", + "description": "Country name for the shipping address. For international addresses - countries should be passed as 3 ISO alpha-3 characters or the full name of the country." + }, + "PaymentMethodRefValue": { + "type": "string", + "description": "The ID for the payment method associated with this Customer object." + }, + "PaymentMethodRefName": { + "type": "string", + "description": "The name of the payment method associated with this Customer object." + }, + "BillAddrLine1": { + "type": "string", + "description": "First line of the billing address." + }, + "BillAddrLine2": { + "type": "string", + "description": "Second line of the billing address." + }, + "BillAddrCity": { + "type": "string", + "description": "City name for the billing address." + }, + "BillAddrCountrySubDivisionCode": { + "type": "string", + "description": "Region within a country for the billing address. For example, state name for USA, province name for Canada." + }, + "BillAddrPostalCode": { + "type": "string", + "description": "Postal code for the billing address." + }, + "BillAddrCountry": { + "type": "string", + "description": "Country name for the billing address. For international addresses - countries should be passed as 3 ISO alpha-3 characters or the full name of the country." + } +} + +customer_properties = { + **customer_properties_user_define, + "Id": { + "type": "string", + "description": "The unique identifier for the customer in QuickBooks. Sort order is ASC by default. System generated" + }, +} + +# MCP Tool definitions +create_customer_tool = Tool( + name="quickbooks_create_customer", + description="Create a new customer in QuickBooks", + inputSchema={ + "type": "object", + "properties": customer_properties_user_define, + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_CUSTOMER"}), +) + +get_customer_tool = Tool( + name="quickbooks_get_customer", + description="Get a specific customer by ID from QuickBooks", + inputSchema={ + "type": "object", + "properties": { + "Id": {"type": "string", "description": "The QuickBooks customer ID"} + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_CUSTOMER", "readOnlyHint": True}), +) + +list_customers_tool = Tool( + name="quickbooks_list_customers", + description="List all customers from QuickBooks", + inputSchema={ + "type": "object", + "properties": { + "MaxResults": {"type": "integer", "description": "Maximum number of results to return", "default": 100}, + "ActiveOnly": {"type": "boolean", "description": "Return only active customers", "default": True} + } + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_CUSTOMER", "readOnlyHint": True}), +) + +update_customer_tool = Tool( + name="quickbooks_update_customer", + description="Update an existing customer in QuickBooks", + inputSchema={ + "type": "object", + "properties": customer_properties, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_CUSTOMER"}), +) + +deactivate_customer_tool = Tool( + name="quickbooks_deactivate_customer", + description="Deactivate a customer from QuickBooks (set Active to false)", + inputSchema={ + "type": "object", + "properties": { + "Id": {"type": "string", "description": "The QuickBooks customer ID to deactivate"} + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_CUSTOMER"}), +) + +activate_customer_tool = Tool( + name="quickbooks_activate_customer", + description="Activate a customer in QuickBooks (set Active to true)", + inputSchema={ + "type": "object", + "properties": { + "Id": {"type": "string", "description": "The QuickBooks customer ID to activate"} + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_CUSTOMER"}), +) + +search_customers_tool = Tool( + name="quickbooks_search_customers", + title="Search Customers", + description="Advanced Customer Search - Search customers with powerful filters including name, contact info, address, balance, status, and other criteria. Perfect for finding specific customers based on various parameters", + inputSchema={ + "type": "object", + "properties": { + "DisplayName": {"type": "string", "description": "Search by customer display name (partial match)"}, + "GivenName": {"type": "string", "description": "Search by given/first name (partial match)"}, + "FamilyName": {"type": "string", "description": "Search by family/last name (partial match)"}, + "MiddleName": {"type": "string", "description": "Search by middle name (partial match)"}, + "CompanyName": {"type": "string", "description": "Search by company name (partial match)"}, + "PrimaryEmailAddr": {"type": "string", "description": "Search by email address (partial match)"}, + "PrimaryPhone": {"type": "string", "description": "Search by phone number (partial match)"}, + + # Status filters + "Active": {"type": "boolean", "description": "Filter by active status"}, + "Taxable": {"type": "boolean", "description": "Filter by taxable status"}, + + # Address filters - Billing Address + "BillAddrCity": {"type": "string", "description": "Search by billing address city"}, + "BillAddrCountrySubDivisionCode": {"type": "string", "description": "Search by billing address state/province"}, + "BillAddrPostalCode": {"type": "string", "description": "Search by billing address postal code"}, + "BillAddrCountry": {"type": "string", "description": "Search by billing address country"}, + "BillAddrLine1": {"type": "string", "description": "Search by billing address line 1 (partial match)"}, + + # Address filters - Shipping Address + "ShipAddrCity": {"type": "string", "description": "Search by shipping address city"}, + "ShipAddrCountrySubDivisionCode": {"type": "string", "description": "Search by shipping address state/province"}, + "ShipAddrPostalCode": {"type": "string", "description": "Search by shipping address postal code"}, + "ShipAddrCountry": {"type": "string", "description": "Search by shipping address country"}, + "ShipAddrLine1": {"type": "string", "description": "Search by shipping address line 1 (partial match)"}, + + # Balance filters + "MinBalance": {"type": "number", "description": "Minimum balance amount"}, + "MaxBalance": {"type": "number", "description": "Maximum balance amount"}, + + # Reference filters + "CurrencyRefValue": {"type": "string", "description": "Filter by currency code"}, + "CurrencyRefName": {"type": "string", "description": "Search by currency name (partial match)"}, + "PaymentMethodRefValue": {"type": "string", "description": "Filter by payment method ID"}, + "PaymentMethodRefName": {"type": "string", "description": "Search by payment method name (partial match)"}, + + # Date filters + "OpenBalanceDateFrom": {"type": "string", "description": "Search by opening balance date from (YYYY-MM-DD format)"}, + "OpenBalanceDateTo": {"type": "string", "description": "Search by opening balance date to (YYYY-MM-DD format)"}, + + # Other filters + "WebAddr": {"type": "string", "description": "Search by website address (partial match)"}, + "Notes": {"type": "string", "description": "Search by notes/description (partial match)"}, + + # Pagination + "MaxResults": {"type": "integer", "description": "Maximum number of results to return", "default": 100}, + "StartPosition": {"type": "integer", "description": "Starting position for pagination (1-based)", "default": 1} + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_CUSTOMER", "readOnlyHint": True}) +) + + +def mcp_object_to_customer_data(**kwargs) -> Dict[str, Any]: + """ + Convert MCP object format to QuickBooks customer data format. + This function transforms the flat MCP structure to the nested format expected by QuickBooks API. + """ + customer_data = {} + + # Basic customer information - direct copy + for field in ['DisplayName', 'GivenName', 'MiddleName', 'FamilyName', + 'CompanyName']: + if field in kwargs: + customer_data[field] = kwargs[field] + + # Contact information - convert to structured objects + if 'PrimaryEmailAddr' in kwargs: + customer_data['PrimaryEmailAddr'] = { + 'Address': kwargs['PrimaryEmailAddr']} + + # Handle phone fields - use PrimaryPhone as the standard input field + if 'PrimaryPhone' in kwargs: + customer_data['PrimaryPhone'] = { + 'FreeFormNumber': kwargs['PrimaryPhone']} + + # Handle Mobile input (for backward compatibility with existing code) + if 'Mobile' in kwargs: + customer_data['Mobile'] = { + 'FreeFormNumber': kwargs['Mobile']} + + if 'WebAddr' in kwargs: + customer_data['WebAddr'] = {'URI': kwargs['WebAddr']} + + # Reference objects - convert separate value/name fields to structured objects + ref_mappings = [ + ('CurrencyRef', 'CurrencyRefValue', 'CurrencyRefName'), + ('PaymentMethodRef', 'PaymentMethodRefValue', 'PaymentMethodRefName') + ] + + for ref_name, value_field, name_field in ref_mappings: + if value_field in kwargs: + ref_obj = {'value': kwargs[value_field]} + if name_field in kwargs: + ref_obj['name'] = kwargs[name_field] + customer_data[ref_name] = ref_obj + + # Address fields - convert flattened fields to structured objects + for addr_type in ['BillAddr', 'ShipAddr']: + address_fields = ['Line1', 'Line2', 'City', + 'CountrySubDivisionCode', 'PostalCode', 'Country'] + has_address = any(kwargs.get(f'{addr_type}{field}') + for field in address_fields) + if has_address: + addr_obj = {} + for field in address_fields: + if kwargs.get(f'{addr_type}{field}'): + addr_obj[field] = kwargs[f'{addr_type}{field}'] + customer_data[addr_type] = addr_obj + + # Boolean fields + for field in ['BillWithParent', 'Taxable', 'Active']: + if field in kwargs: + customer_data[field] = kwargs[field] + + # Numeric fields + for field in ['Balance']: + if field in kwargs: + customer_data[field] = kwargs[field] + + # Date fields + for field in ['OpenBalanceDate']: + if field in kwargs: + customer_data[field] = kwargs[field] + + # Metadata fields - removed for input + + return customer_data + + +def customer_data_to_mcp_object(customer_data: Dict[str, Any]) -> Dict[str, Any]: + """ + Convert QuickBooks customer data format to MCP object format. + This function flattens the nested QuickBooks structure to the flat format expected by MCP tools. + All fields from customer_properties_full are included with default values if not present. + """ + mcp_object = {} + # Only set fields that exist in customer_data + for field in [ + 'Id', 'DisplayName', 'Title', 'GivenName', 'MiddleName', 'FamilyName', + 'Suffix', 'CompanyName', 'FullyQualifiedName', 'ResaleNum', 'SecondaryTaxIdentifier', + 'BusinessNumber', 'GSTIN', 'GSTRegistrationType', 'PrimaryTaxIdentifier', + 'PreferredDeliveryMethod', 'Notes', 'PrintOnCheckName', + 'ARAccountRefValue', 'ARAccountRefName', 'DefaultTaxCodeRefValue', 'DefaultTaxCodeRefName', + 'SalesTermRefValue', 'SalesTermRefName', 'CurrencyRefValue', 'CurrencyRefName', + 'CustomerTypeRef', 'ParentRefValue', 'ParentRefName', 'PaymentMethodRefValue', 'PaymentMethodRefName', + 'BillAddrLine1', 'BillAddrLine2', + 'BillAddrCity', 'BillAddrCountrySubDivisionCode', 'BillAddrPostalCode', 'BillAddrCountry', + 'ShipAddrLine1', 'ShipAddrLine2', + 'ShipAddrCity', 'ShipAddrCountrySubDivisionCode', 'ShipAddrPostalCode', 'ShipAddrCountry', + 'OpenBalanceDate', 'MetaDataCreateTime', 'MetaDataLastUpdatedTime', + 'PrimaryEmailAddr', 'PrimaryPhone', 'Mobile', 'AlternatePhone', 'Fax', 'WebAddr', + 'Balance', 'BalanceWithJobs', 'TaxExemptionReasonId', + 'BillWithParent', 'Job', 'Taxable', 'Active', 'IsProject']: + if field in customer_data: + mcp_object[field] = customer_data[field] + + # Contact information - flatten structured objects + if 'PrimaryEmailAddr' in customer_data: + addr = customer_data['PrimaryEmailAddr'] + if isinstance(addr, dict) and 'Address' in addr: + mcp_object['PrimaryEmailAddr'] = addr['Address'] + for phone_field in ['PrimaryPhone', 'Mobile', 'AlternatePhone', 'Fax']: + if phone_field in customer_data: + phone = customer_data[phone_field] + if isinstance(phone, dict) and 'FreeFormNumber' in phone: + mcp_object[phone_field] = phone['FreeFormNumber'] + if 'WebAddr' in customer_data: + web = customer_data['WebAddr'] + if isinstance(web, dict) and 'URI' in web: + mcp_object['WebAddr'] = web['URI'] + + # Reference objects - flatten to separate value and name fields + if 'ARAccountRef' in customer_data: + ref = customer_data['ARAccountRef'] + if isinstance(ref, dict): + if 'value' in ref: + mcp_object['ARAccountRefValue'] = ref['value'] + if 'name' in ref: + mcp_object['ARAccountRefName'] = ref['name'] + if 'DefaultTaxCodeRef' in customer_data: + ref = customer_data['DefaultTaxCodeRef'] + if isinstance(ref, dict): + if 'value' in ref: + mcp_object['DefaultTaxCodeRefValue'] = ref['value'] + if 'name' in ref: + mcp_object['DefaultTaxCodeRefName'] = ref['name'] + if 'SalesTermRef' in customer_data: + ref = customer_data['SalesTermRef'] + if isinstance(ref, dict): + if 'value' in ref: + mcp_object['SalesTermRefValue'] = ref['value'] + if 'name' in ref: + mcp_object['SalesTermRefName'] = ref['name'] + if 'CurrencyRef' in customer_data: + ref = customer_data['CurrencyRef'] + if isinstance(ref, dict): + if 'value' in ref: + mcp_object['CurrencyRefValue'] = ref['value'] + if 'name' in ref: + mcp_object['CurrencyRefName'] = ref['name'] + if 'CustomerTypeRef' in customer_data: + ref = customer_data['CustomerTypeRef'] + if isinstance(ref, dict) and 'value' in ref: + mcp_object['CustomerTypeRef'] = ref['value'] + if 'ParentRef' in customer_data: + ref = customer_data['ParentRef'] + if isinstance(ref, dict): + if 'value' in ref: + mcp_object['ParentRefValue'] = ref['value'] + if 'name' in ref: + mcp_object['ParentRefName'] = ref['name'] + if 'PaymentMethodRef' in customer_data: + ref = customer_data['PaymentMethodRef'] + if isinstance(ref, dict): + if 'value' in ref: + mcp_object['PaymentMethodRefValue'] = ref['value'] + if 'name' in ref: + mcp_object['PaymentMethodRefName'] = ref['name'] + + # Address fields - flatten structured objects + if 'BillAddr' in customer_data: + addr = customer_data['BillAddr'] + if isinstance(addr, dict): + for field in ['Line1', 'Line2', 'City', 'CountrySubDivisionCode', 'PostalCode', 'Country']: + if field in addr: + mcp_object[f'BillAddr{field}'] = addr[field] + if 'ShipAddr' in customer_data: + addr = customer_data['ShipAddr'] + if isinstance(addr, dict): + for field in ['Line1', 'Line2', 'City', 'CountrySubDivisionCode', 'PostalCode', 'Country']: + if field in addr: + mcp_object[f'ShipAddr{field}'] = addr[field] + + # Metadata fields - flatten structured object + if 'MetaData' in customer_data: + metadata = customer_data['MetaData'] + if isinstance(metadata, dict): + if 'CreateTime' in metadata: + mcp_object['MetaDataCreateTime'] = metadata['CreateTime'] + if 'LastUpdatedTime' in metadata: + mcp_object['MetaDataLastUpdatedTime'] = metadata['LastUpdatedTime'] + return mcp_object + + +class CustomerManager: + def __init__(self, client: QuickBooksHTTPClient): + self.client = client + + async def create_customer(self, **kwargs) -> Dict[str, Any]: + + # Validate we have DisplayName or at least one name component + display_name_provided = 'DisplayName' in kwargs + name_components_provided = any([ + 'GivenName' in kwargs, + 'FamilyName' in kwargs, + 'MiddleName' in kwargs, + ]) + + if not display_name_provided and not name_components_provided: + raise ValueError( + "At least one of: DisplayName, GivenName, FamilyName, MiddleName must be provided.") + + customer_data = mcp_object_to_customer_data(**kwargs) + response = await self.client._post('customer', customer_data) + + # Convert response back to MCP format + return customer_data_to_mcp_object(response['Customer']) + + async def get_customer(self, Id: str) -> Dict[str, Any]: + response = await self.client._get(f"customer/{Id}") + + # Convert response back to MCP format + return customer_data_to_mcp_object(response['Customer']) + + async def list_customers(self, MaxResults: int = 100, ActiveOnly: bool = True) -> List[Dict[str, Any]]: + query = "SELECT * FROM Customer" + if ActiveOnly: + query += " WHERE Active = true" + query += f" STARTPOSITION 1 MAXRESULTS {MaxResults}" + response = await self.client._get('query', params={'query': query}) + + # Convert response back to MCP format + customers = response['QueryResponse']['Customer'] + return [customer_data_to_mcp_object(customer) for customer in customers] + + async def update_customer(self, **kwargs) -> Dict[str, Any]: + customer_id = kwargs.get('Id') + if not customer_id: + raise ValueError("Id is required for updating a customer") + + # Auto-fetch current sync token + current_customer_response = await self.client._get(f"customer/{customer_id}") + sync_token = current_customer_response.get( + 'Customer', {}).get('SyncToken', '0') + + # Use the mcp_object_to_customer_data function to convert the input + customer_data = mcp_object_to_customer_data(**kwargs) + + # Add required fields for update + customer_data.update({ + "Id": customer_id, + "SyncToken": sync_token, + "sparse": True + }) + + response = await self.client._post('customer', customer_data) + + # Convert response back to MCP format + return customer_data_to_mcp_object(response['Customer']) + + async def deactivate_customer(self, Id: str) -> Dict[str, Any]: + # First get the current customer to obtain the SyncToken + current_customer_response = await self.client._get(f"customer/{Id}") + current_data = current_customer_response.get('Customer', {}) + + # Set Active to false for deactivation + customer_data = { + "Id": str(current_data.get('Id')), + "SyncToken": str(current_data.get('SyncToken')), + "Active": False, + "sparse": True + } + + response = await self.client._post('customer', customer_data) + + # Convert response back to MCP format + return customer_data_to_mcp_object(response['Customer']) + + async def activate_customer(self, Id: str) -> Dict[str, Any]: + # First get the current customer to obtain the SyncToken + current_customer_response = await self.client._get(f"customer/{Id}") + current_data = current_customer_response.get('Customer', {}) + + # Set Active to true for activation + customer_data = { + "Id": str(current_data.get('Id')), + "SyncToken": str(current_data.get('SyncToken')), + "Active": True, + "sparse": True + } + + response = await self.client._post('customer', customer_data) + + # Convert response back to MCP format + return customer_data_to_mcp_object(response['Customer']) + + async def search_customers(self, **kwargs) -> List[Dict[str, Any]]: + """ + Search customers with various filters and pagination support. + + Args: + DisplayName: Search by customer display name (partial match) + GivenName/FamilyName/MiddleName: Search by name components (partial match) + CompanyName: Search by company name (partial match) + PrimaryEmailAddr/PrimaryPhone: Search by contact info (partial match) + + # Status filters + Active: Filter by active status + Job: Filter by job status (sub-customers) + BillWithParent: Filter by bill with parent status + Taxable: Filter by taxable status + + # Identification filters + ResaleNum/BusinessNumber/GSTIN: Search by identification numbers + PrimaryTaxIdentifier/SecondaryTaxIdentifier: Search by tax identifiers + + # Address filters (billing and shipping) + BillAddrCity/ShipAddrCity: Search by city + BillAddrCountrySubDivisionCode/ShipAddrCountrySubDivisionCode: Search by state/province + BillAddrPostalCode/ShipAddrPostalCode: Search by postal code + BillAddrCountry/ShipAddrCountry: Search by country + BillAddrLine1/ShipAddrLine1: Search by address line 1 (partial match) + + # Balance filters + MinBalance/MaxBalance: Filter by balance range + MinBalanceWithJobs/MaxBalanceWithJobs: Filter by balance with jobs range + + # Reference filters + ParentRefValue: Filter by parent customer ID + ParentRefName: Search by parent customer name (partial match) + CurrencyRefValue: Filter by currency code + CurrencyRefName: Search by currency name (partial match) + SalesTermRefValue: Filter by sales term ID + PaymentMethodRefValue: Filter by payment method ID + DefaultTaxCodeRefValue: Filter by default tax code ID + + # Date filters + CreateTimeFrom/CreateTimeTo: Filter by creation date range + LastUpdatedTimeFrom/LastUpdatedTimeTo: Filter by last updated date range + OpenBalanceDateFrom/OpenBalanceDateTo: Filter by opening balance date range + + # Other filters + PreferredDeliveryMethod: Filter by delivery method + GSTRegistrationType: Filter by GST registration type + WebAddr: Search by website (partial match) + Notes: Search by notes (partial match) + + MaxResults: Maximum number of results to return (default: 100) + StartPosition: Starting position for pagination (default: 1) + + Returns: + List of customers matching the search criteria + """ + # Build WHERE clause conditions + conditions = [] + + # Basic filters with exact matches + if kwargs.get('Active') is not None: + conditions.append(f"Active = {str(kwargs['Active']).lower()}") + + if kwargs.get('Job') is not None: + conditions.append(f"Job = {str(kwargs['Job']).lower()}") + + if kwargs.get('BillWithParent') is not None: + conditions.append( + f"BillWithParent = {str(kwargs['BillWithParent']).lower()}") + + if kwargs.get('Taxable') is not None: + conditions.append(f"Taxable = {str(kwargs['Taxable']).lower()}") + + # Reference filters + if kwargs.get('ParentRefValue'): + conditions.append( + f"ParentRef.value = '{kwargs['ParentRefValue']}'") + + if kwargs.get('CurrencyRefValue'): + conditions.append( + f"CurrencyRef.value = '{kwargs['CurrencyRefValue']}'") + + if kwargs.get('SalesTermRefValue'): + conditions.append( + f"SalesTermRef.value = '{kwargs['SalesTermRefValue']}'") + + if kwargs.get('PaymentMethodRefValue'): + conditions.append( + f"PaymentMethodRef.value = '{kwargs['PaymentMethodRefValue']}'") + + if kwargs.get('DefaultTaxCodeRefValue'): + conditions.append( + f"DefaultTaxCodeRef.value = '{kwargs['DefaultTaxCodeRefValue']}'") + + # Exact match filters for structured data + if kwargs.get('ResaleNum'): + conditions.append(f"ResaleNum = '{kwargs['ResaleNum']}'") + + if kwargs.get('BusinessNumber'): + conditions.append(f"BusinessNumber = '{kwargs['BusinessNumber']}'") + + if kwargs.get('GSTIN'): + conditions.append(f"GSTIN = '{kwargs['GSTIN']}'") + + if kwargs.get('PreferredDeliveryMethod'): + conditions.append( + f"PreferredDeliveryMethod = '{kwargs['PreferredDeliveryMethod']}'") + + if kwargs.get('GSTRegistrationType'): + conditions.append( + f"GSTRegistrationType = '{kwargs['GSTRegistrationType']}'") + + # Address filters - exact matches for structured fields + address_exact_fields = { + 'BillAddrCity': 'BillAddr.City', + 'BillAddrCountrySubDivisionCode': 'BillAddr.CountrySubDivisionCode', + 'BillAddrPostalCode': 'BillAddr.PostalCode', + 'BillAddrCountry': 'BillAddr.Country', + 'ShipAddrCity': 'ShipAddr.City', + 'ShipAddrCountrySubDivisionCode': 'ShipAddr.CountrySubDivisionCode', + 'ShipAddrPostalCode': 'ShipAddr.PostalCode', + 'ShipAddrCountry': 'ShipAddr.Country' + } + + for field, qb_field in address_exact_fields.items(): + if kwargs.get(field): + conditions.append(f"{qb_field} = '{kwargs[field]}'") + + # Balance range filters + if kwargs.get('MinBalance') is not None: + conditions.append(f"Balance >= {kwargs['MinBalance']}") + if kwargs.get('MaxBalance') is not None: + conditions.append(f"Balance <= {kwargs['MaxBalance']}") + + if kwargs.get('MinBalanceWithJobs') is not None: + conditions.append( + f"BalanceWithJobs >= {kwargs['MinBalanceWithJobs']}") + if kwargs.get('MaxBalanceWithJobs') is not None: + conditions.append( + f"BalanceWithJobs <= {kwargs['MaxBalanceWithJobs']}") + + # Date range filters + if kwargs.get('CreateTimeFrom'): + conditions.append( + f"MetaData.CreateTime >= '{kwargs['CreateTimeFrom']}'") + if kwargs.get('CreateTimeTo'): + conditions.append( + f"MetaData.CreateTime <= '{kwargs['CreateTimeTo']}'") + + if kwargs.get('LastUpdatedTimeFrom'): + conditions.append( + f"MetaData.LastUpdatedTime >= '{kwargs['LastUpdatedTimeFrom']}'") + if kwargs.get('LastUpdatedTimeTo'): + conditions.append( + f"MetaData.LastUpdatedTime <= '{kwargs['LastUpdatedTimeTo']}'") + + if kwargs.get('OpenBalanceDateFrom'): + conditions.append( + f"OpenBalanceDate >= '{kwargs['OpenBalanceDateFrom']}'") + if kwargs.get('OpenBalanceDateTo'): + conditions.append( + f"OpenBalanceDate <= '{kwargs['OpenBalanceDateTo']}'") + + # Partial match filters - we'll post-filter these due to QB API limitations + partial_match_filters = {} + + partial_fields = [ + 'DisplayName', 'GivenName', 'FamilyName', 'MiddleName', 'CompanyName', + 'PrimaryEmailAddr', 'PrimaryPhone', 'WebAddr', 'Notes', + 'BillAddrLine1', 'ShipAddrLine1', 'PrimaryTaxIdentifier', 'SecondaryTaxIdentifier' + ] + + for field in partial_fields: + if kwargs.get(field): + partial_match_filters[field] = kwargs[field].lower() + + # Reference name searches (requires subqueries or post-filtering) + if kwargs.get('ParentRefName'): + parent_name = kwargs['ParentRefName'].replace( + "'", "''") # Escape single quotes + conditions.append( + f"ParentRef.value IN (SELECT Id FROM Customer WHERE DisplayName LIKE '%{parent_name}%')") + + reference_name_fields = [ + 'CurrencyRefName', 'SalesTermRefName', 'PaymentMethodRefName', 'DefaultTaxCodeRefName'] + for field in reference_name_fields: + if kwargs.get(field): + partial_match_filters[field] = kwargs[field].lower() + + # Build the query + query = "SELECT * FROM Customer" + + if conditions: + query += " WHERE " + " AND ".join(conditions) + + # Add pagination + max_results = kwargs.get('MaxResults', 100) + start_position = kwargs.get('StartPosition', 1) + + query += f" STARTPOSITION {start_position} MAXRESULTS {max_results}" + + response = await self.client._get('query', params={'query': query}) + + # Handle case when no customers are returned + if 'Customer' not in response['QueryResponse']: + return [] + + customers = response['QueryResponse']['Customer'] + + # Apply post-filtering for partial matches + if partial_match_filters: + filtered_customers = [] + for customer in customers: + should_include = True + + for field, search_value in partial_match_filters.items(): + if field in ['DisplayName', 'GivenName', 'FamilyName', 'MiddleName', 'CompanyName', 'ResaleNum', 'WebAddr', 'Notes']: + if field in customer and search_value not in customer[field].lower(): + should_include = False + break + elif field in ['PrimaryEmailAddr', 'PrimaryPhone']: + if field in customer and search_value not in customer[field].lower(): + should_include = False + break + elif field in ['PrimaryTaxIdentifier', 'SecondaryTaxIdentifier']: + if field in customer and search_value not in customer[field].lower(): + should_include = False + break + elif field == 'BillAddrLine1' and 'BillAddr' in customer and isinstance(customer['BillAddr'], dict): + addr_line = customer['BillAddr'].get( + 'Line1', '').lower() + if search_value not in addr_line: + should_include = False + break + elif field == 'ShipAddrLine1' and 'ShipAddr' in customer and isinstance(customer['ShipAddr'], dict): + addr_line = customer['ShipAddr'].get( + 'Line1', '').lower() + if search_value not in addr_line: + should_include = False + break + elif field == 'CurrencyRefName' and 'CurrencyRef' in customer and isinstance(customer['CurrencyRef'], dict): + currency_name = customer['CurrencyRef'].get( + 'name', '').lower() + if search_value not in currency_name: + should_include = False + break + elif field == 'SalesTermRefName' and 'SalesTermRef' in customer and isinstance(customer['SalesTermRef'], dict): + term_name = customer['SalesTermRef'].get( + 'name', '').lower() + if search_value not in term_name: + should_include = False + break + elif field == 'PaymentMethodRefName' and 'PaymentMethodRef' in customer and isinstance(customer['PaymentMethodRef'], dict): + method_name = customer['PaymentMethodRef'].get( + 'name', '').lower() + if search_value not in method_name: + should_include = False + break + elif field == 'DefaultTaxCodeRefName' and 'DefaultTaxCodeRef' in customer and isinstance(customer['DefaultTaxCodeRef'], dict): + tax_name = customer['DefaultTaxCodeRef'].get( + 'name', '').lower() + if search_value not in tax_name: + should_include = False + break + + if should_include: + filtered_customers.append(customer) + + customers = filtered_customers + + return [customer_data_to_mcp_object(customer) for customer in customers] + + +# Export tools +tools = [create_customer_tool, get_customer_tool, list_customers_tool, + update_customer_tool, deactivate_customer_tool, activate_customer_tool, search_customers_tool] diff --git a/mcp_servers/quickbooks/tools/http_client.py b/mcp_servers/quickbooks/tools/http_client.py new file mode 100644 index 00000000..0bb4b4f3 --- /dev/null +++ b/mcp_servers/quickbooks/tools/http_client.py @@ -0,0 +1,88 @@ +import os +import logging +import httpx +from typing import Dict, Any +from errors import QuickBooksError + +logger = logging.getLogger(__name__) + + +class QuickBooksHTTPClient: + """Direct HTTP client for QuickBooks API using httpx library.""" + + def __init__(self, access_token: str = None, company_id: str = None, environment: str = None, minor_version: int = 75): + # Use provided parameters or fall back to environment variables + self.access_token = access_token or os.getenv('QB_ACCESS_TOKEN') + self.company_id = company_id or os.getenv('QB_REALM_ID') + self.environment = (environment or os.getenv('QB_ENVIRONMENT', 'production')).lower() + self.minor_version = minor_version + self.async_session = httpx.AsyncClient() + + if self.environment == 'sandbox': + self.base_url = "https://sandbox-quickbooks.api.intuit.com/" + else: + self.base_url = "https://quickbooks.api.intuit.com/" + + def is_configured(self) -> bool: + """Check if client is properly configured.""" + return bool(self.access_token and self.company_id) + + async def _make_request(self, method: str, endpoint: str, **kwargs) -> Dict[str, Any]: + if not self.is_configured(): + raise ValueError("QuickBooks client not properly configured") + + url = f"{self.base_url}/v3/company/{self.company_id}/{endpoint}" + params = kwargs.pop('params', {}) + if params is None: + params = {} + params['minorversion'] = self.minor_version + + headers = { + 'Authorization': f'Bearer {self.access_token}', + 'Accept': 'application/json', + 'Content-Type': 'application/json' + } + + logger.debug( + f"Sending {method} to {url} with data: {kwargs.get('json')}") + + def try_get_resp_json(response): + try: + return response.json() + except Exception: + return { + "error": "Invalid JSON response", + "status_code": response.status_code if response else None, + "text": response.text, + } + + try: + response = await self.async_session.request( + method, url, headers=headers, params=params, **kwargs + ) + response.raise_for_status() + return try_get_resp_json(response) + except httpx.HTTPStatusError as e: + logger.error(f"Request failed: {e}") + if hasattr(e, 'response') and e.response is not None: + logger.error(f"Status code: {e.response.status_code}") + logger.error(f"Response headers: {e.response.headers}") + logger.error(f"Response text: {e.response.text}") + resp_json = try_get_resp_json(e.response) + else: + resp_json = None + raise QuickBooksError( + f"Error: {str(e)}, Status code: {e.response.status_code if e.response else 'N/A'}, Response: {resp_json}", + original_exception=e, + ) + + async def _get(self, endpoint: str, params: Dict[str, Any] = None) -> Dict[str, Any]: + """GET request.""" + return await self._make_request('GET', endpoint, params=params) + + async def _post(self, endpoint: str, data: Dict[str, Any], params: Dict[str, Any] = None) -> Dict[str, Any]: + """POST request.""" + return await self._make_request('POST', endpoint, params=params, json=data) + + async def close(self): + await self.async_session.aclose() diff --git a/mcp_servers/quickbooks/tools/invoices.py b/mcp_servers/quickbooks/tools/invoices.py new file mode 100644 index 00000000..aaa413b7 --- /dev/null +++ b/mcp_servers/quickbooks/tools/invoices.py @@ -0,0 +1,1070 @@ +from typing import Any, Dict, List + +from mcp.types import Tool +import mcp.types as types +from .http_client import QuickBooksHTTPClient + +# Minimal properties for invoice creation (required by QuickBooks) +invoice_properties_minimal = { + "CustomerRefValue": { + "type": "string", + "description": "Customer ID for the invoice" + }, + "CustomerRefName": { + "type": "string", + "description": "Name of the customer associated with the invoice" + }, + "LineItems": { + "type": "array", + "items": { + "type": "object", + "properties": { + "LineId": {"type": "string", "description": "Unique ID for this line item. Required for updates, ignored for creates"}, + "Amount": {"type": "number", "description": "Line total amount"}, + "Description": {"type": "string", "description": "Line item description"}, + "DetailType": {"type": "string", "description": "Line detail type: SalesItemLineDetail, DescriptionOnlyLine, DiscountLine, SubTotalLine", "default": "SalesItemLineDetail"}, + "LineNum": {"type": "number", "description": "Position of the line in the collection"}, + + # SalesItemLineDetail fields + "ItemId": {"type": "string", "description": "Reference to the inventory item"}, + "ItemName": {"type": "string", "description": "Name of the inventory item"}, + "Quantity": {"type": "number", "description": "Quantity of the item"}, + "UnitPrice": {"type": "number", "description": "Unit price of the item"}, + "DiscountRate": {"type": "number", "description": "Discount rate applied to this line as a percentage"}, + "DiscountAmount": {"type": "number", "description": "Discount amount applied to this line"}, + "ServiceDate": {"type": "string", "description": "Date when the service is performed (YYYY-MM-DD format)"}, + + # DescriptionOnlyLine fields + "IsSubtotal": {"type": "boolean", "description": "Set to true for subtotal lines"}, + + # DiscountLineDetail fields + "DiscountPercent": {"type": "number", "description": "Percentage discount as a number (10 for 10%)"}, + "DiscountAccountId": {"type": "string", "description": "Income account used to track discounts"}, + "DiscountAccountName": {"type": "string", "description": "Name of the discount account"}, + "IsPercentBased": {"type": "boolean", "description": "True if the discount is a percentage"}, + + "TaxCodeId": {"type": "string", "description": "Reference to the TaxCode for this item"}, + "TaxCodeName": {"type": "string", "description": "Name of the TaxCode"} + }, + "required": ["Amount"] + } + }, +} + +# Invoice properties mapping (based on QuickBooks API documentation) +invoice_properties_user_define = { + **invoice_properties_minimal, + "DocNumber": { + "type": "string", + "description": "Reference number for the transaction. If not explicitly provided at create time, this field is populated based on the setting of Preferences:CustomTxnNumber" + }, + "TransactionDate": { + "type": "string", + "description": "The date entered by the user when this transaction occurred. Format: yyyy/MM/dd" + }, + "ShipDate": { + "type": "string", + "description": "Date for delivery of goods or services. Format: yyyy/MM/dd" + }, + "DueDate": { + "type": "string", + "description": "Date when the payment of the transaction is due. If date is not provided, the number of days specified in SalesTermRef added the transaction date will be used. Format: yyyy/MM/dd" + }, + "CustomerMemo": { + "type": "string", + "description": "User-entered message to the customer; this message is visible to end user on their transactions" + }, + "BillEmail": { + "type": "string", + "description": "Identifies the e-mail address where the invoice is sent" + }, + "BillEmailCc": { + "type": "string", + "description": "Identifies the carbon copy e-mail address where the invoice is sent" + }, + "ShipFromAddrLine1": { + "type": "string", + "description": "First line of the address where goods are shipped from" + }, + "ShipFromAddrLine2": { + "type": "string", + "description": "Second line of the address where goods are shipped from" + }, + "ShipFromAddrCity": { + "type": "string", + "description": "City name for the shipping address where goods are shipped from" + }, + "ShipFromAddrCountrySubDivisionCode": { + "type": "string", + "description": "Region within a country for the shipping address where goods are shipped from" + }, + "ShipFromAddrPostalCode": { + "type": "string", + "description": "Postal code for the shipping address where goods are shipped from" + }, + "ShipFromAddrCountry": { + "type": "string", + "description": "Country name for the shipping address where goods are shipped from" + }, + "ShipAddrLine1": { + "type": "string", + "description": "First line of the shipping address where goods must be shipped" + }, + "ShipAddrLine2": { + "type": "string", + "description": "Second line of the shipping address" + }, + "ShipAddrCity": { + "type": "string", + "description": "City name for the shipping address" + }, + "ShipAddrCountrySubDivisionCode": { + "type": "string", + "description": "Region within a country for the shipping address" + }, + "ShipAddrPostalCode": { + "type": "string", + "description": "Postal code for the shipping address" + }, + "ShipAddrCountry": { + "type": "string", + "description": "Country name for the shipping address" + }, + "BillAddrLine1": { + "type": "string", + "description": "First line of the billing address" + }, + "BillAddrLine2": { + "type": "string", + "description": "Second line of the billing address" + }, + "BillAddrCity": { + "type": "string", + "description": "City name for the billing address" + }, + "BillAddrCountrySubDivisionCode": { + "type": "string", + "description": "Region within a country for the billing address" + }, + "BillAddrPostalCode": { + "type": "string", + "description": "Postal code for the billing address" + }, + "BillAddrCountry": { + "type": "string", + "description": "Country name for the billing address" + }, + "ShipMethodRefValue": { + "type": "string", + "description": "Reference to the ShipMethod associated with the transaction" + }, + "ShipMethodRefName": { + "type": "string", + "description": "Name of the ShipMethod associated with the transaction" + }, + "CurrencyRefValue": { + "type": "string", + "description": "Three letter ISO code representing the currency" + }, + "CurrencyRefName": { + "type": "string", + "description": "Full name of the currency" + }, + "TrackingNum": { + "type": "string", + "description": "Shipping provider's tracking number for the delivery of the goods" + }, + "Deposit": { + "type": "number", + "description": "The deposit made towards this invoice" + }, + "DepositToAccountRefValue": { + "type": "string", + "description": "Account to which money is deposited" + }, + "DepositToAccountRefName": { + "type": "string", + "description": "Name of the account to which money is deposited" + } +} + +invoice_properties = { + **invoice_properties_user_define, + "Id": { + "type": "string", + "description": "The unique QuickBooks invoice ID" + } +} + +# MCP Tool definitions +create_invoice_tool = Tool( + name="quickbooks_create_invoice", + title="Create Invoice", + description="Create New Invoice - Create a new invoice in QuickBooks. Requires CustomerRef and at least one valid line (SalesItemLine or DescriptionOnlyLine).", + inputSchema={ + "type": "object", + "properties": invoice_properties_minimal, + "required": ["CustomerRefValue", "LineItems"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_INVOICE"}) +) + +get_invoice_tool = Tool( + name="quickbooks_get_invoice", + title="Get Invoice", + description="Get Single Invoice - Retrieve a specific invoice by ID from QuickBooks with all its details including line items, amounts, and status", + inputSchema={ + "type": "object", + "properties": { + "Id": {"type": "string", "description": "The QuickBooks invoice ID"} + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_INVOICE", "readOnlyHint": True}) +) + +list_invoices_tool = Tool( + name="quickbooks_list_invoices", + title="List Invoices", + description="List All Invoices - Retrieve all invoices from QuickBooks with pagination support. Use for browsing or getting overview of invoices", + inputSchema={ + "type": "object", + "properties": { + "MaxResults": {"type": "integer", "description": "Maximum number of results to return", "default": 100}, + "StartPosition": {"type": "integer", "description": "Starting position for pagination (1-based)", "default": 1}, + }, + "required": [], + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_INVOICE", "readOnlyHint": True}) +) + +search_invoices_tool = Tool( + name="quickbooks_search_invoices", + title="Search Invoices", + description="Advanced Invoice Search - Search invoices with powerful filters including dates, amounts, addresses, customer info, and status. Perfect for finding specific invoices based on criteria", + inputSchema={ + "type": "object", + "properties": { + "DocNumber": {"type": "string", "description": "Search by document/invoice number"}, + "CustomerRefValue": {"type": "string", "description": "Search by customer ID"}, + "CustomerName": {"type": "string", "description": "Search by customer name (partial match)"}, + + # Date filters + "TransactionDateFrom": {"type": "string", "description": "Search invoices from this transaction date (YYYY-MM-DD format)"}, + "TransactionDateTo": {"type": "string", "description": "Search invoices to this transaction date (YYYY-MM-DD format)"}, + "DueDateFrom": {"type": "string", "description": "Search invoices from this due date (YYYY-MM-DD format)"}, + "DueDateTo": {"type": "string", "description": "Search invoices to this due date (YYYY-MM-DD format)"}, + "ShipDateFrom": {"type": "string", "description": "Search invoices from this ship date (YYYY-MM-DD format)"}, + "ShipDateTo": {"type": "string", "description": "Search invoices to this ship date (YYYY-MM-DD format)"}, + + # Amount filters + "MinAmount": {"type": "number", "description": "Minimum total amount"}, + "MaxAmount": {"type": "number", "description": "Maximum total amount"}, + "MinBalance": {"type": "number", "description": "Minimum balance amount"}, + "MaxBalance": {"type": "number", "description": "Maximum balance amount"}, + + # Address filters - Billing Address + "BillAddrCity": {"type": "string", "description": "Search by billing address city"}, + "BillAddrState": {"type": "string", "description": "Search by billing address state/province"}, + "BillAddrPostalCode": {"type": "string", "description": "Search by billing address postal code"}, + "BillAddrCountry": {"type": "string", "description": "Search by billing address country"}, + "BillAddrLine1": {"type": "string", "description": "Search by billing address line 1 (partial match)"}, + + # Address filters - Shipping Address + "ShipAddrCity": {"type": "string", "description": "Search by shipping address city"}, + "ShipAddrState": {"type": "string", "description": "Search by shipping address state/province"}, + "ShipAddrPostalCode": {"type": "string", "description": "Search by shipping address postal code"}, + "ShipAddrCountry": {"type": "string", "description": "Search by shipping address country"}, + "ShipAddrLine1": {"type": "string", "description": "Search by shipping address line 1 (partial match)"}, + + # Address filters - Ship From Address + "ShipFromAddrCity": {"type": "string", "description": "Search by ship from address city"}, + "ShipFromAddrState": {"type": "string", "description": "Search by ship from address state/province"}, + "ShipFromAddrPostalCode": {"type": "string", "description": "Search by ship from address postal code"}, + "ShipFromAddrCountry": {"type": "string", "description": "Search by ship from address country"}, + "ShipFromAddrLine1": {"type": "string", "description": "Search by ship from address line 1 (partial match)"}, + + # Pagination + "MaxResults": {"type": "integer", "description": "Maximum number of results to return", "default": 100}, + "StartPosition": {"type": "integer", "description": "Starting position for pagination (1-based)", "default": 1} + }, + "required": [], + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_INVOICE", "readOnlyHint": True}) +) + +update_invoice_tool = Tool( + name="quickbooks_update_invoice", + title="Update Invoice", + description="Update Existing Invoice - Modify an existing invoice in QuickBooks. Automatically handles sync tokens for safe concurrent updates", + inputSchema={ + "type": "object", + "properties": invoice_properties, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_INVOICE"}) +) + +delete_invoice_tool = Tool( + name="quickbooks_delete_invoice", + title="Delete Invoice", + description="ļøDelete Invoice - Permanently delete an invoice from QuickBooks. Use with caution as this action cannot be undone", + inputSchema={ + "type": "object", + "properties": { + "Id": {"type": "string", "description": "The QuickBooks invoice ID to delete"} + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_INVOICE"}) +) + +send_invoice_tool = Tool( + name="quickbooks_send_invoice", + title="Send Invoice", + description="Send Invoice via Email - Send an invoice to customer via email with delivery tracking. Updates email status and delivery info automatically", + inputSchema={ + "type": "object", + "properties": { + "Id": { + "type": "string", + "description": "The QuickBooks invoice ID to send" + }, + "SendTo": { + "type": "string", + "description": "Optional email address to send the invoice to. If not provided, uses the invoice's BillEmail address", + } + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_INVOICE"}) +) + +void_invoice_tool = Tool( + name="quickbooks_void_invoice", + title="Void Invoice", + description="Void Invoice - Void an existing invoice in QuickBooks. Sets all amounts to zero and marks as 'Voided' while keeping the record for audit trail", + inputSchema={ + "type": "object", + "properties": { + "Id": { + "type": "string", + "description": "The QuickBooks invoice ID to void" + } + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_INVOICE"}) +) + + +def mcp_object_to_invoice_data(**kwargs) -> Dict[str, Any]: + """ + Convert MCP object format to QuickBooks invoice data format. + This function transforms the flat MCP structure to the nested format expected by QuickBooks API. + """ + invoice_data = {} + + # Basic invoice information - direct copy + for field in ['DocNumber', 'ShipDate', 'DueDate', + 'PrintStatus', 'EmailStatus', 'TrackingNum']: + if field in kwargs: + invoice_data[field] = kwargs[field] + + # Handle renamed field: TransactionDate -> TxnDate + if 'TransactionDate' in kwargs: + invoice_data['TxnDate'] = kwargs['TransactionDate'] + + # CustomerMemo needs to be in object format + if 'CustomerMemo' in kwargs: + invoice_data['CustomerMemo'] = {'value': kwargs['CustomerMemo']} + + # Boolean fields + for field in ['ApplyTaxAfterDiscount', 'AllowOnlineACHPayment', 'AllowOnlineCreditCardPayment']: + if field in kwargs: + invoice_data[field] = kwargs[field] + + # Numeric fields + for field in ['Deposit', 'ExchangeRate']: + if field in kwargs: + invoice_data[field] = kwargs[field] + + # String fields + for field in ['GlobalTaxCalculation']: + if field in kwargs: + invoice_data[field] = kwargs[field] + + # Email addresses - convert to structured objects + for email_field in ['BillEmail', 'BillEmailCc']: + if email_field in kwargs: + invoice_data[email_field] = {'Address': kwargs[email_field]} + + # Line items - handle the LineItems parameter with all detail types + if 'LineItems' in kwargs: + lines = [] + for item in kwargs['LineItems']: + line = { + "Amount": item["Amount"], + "DetailType": item.get("DetailType", "SalesItemLineDetail"), + "Description": item.get("Description") + } + + if item.get("LineId"): + line["Id"] = item["LineId"] + if item.get("LineNum"): + line["LineNum"] = item["LineNum"] + + detail_type = item.get("DetailType", "SalesItemLineDetail") + + if detail_type == "SalesItemLineDetail": + sales_detail = {} + if item.get("ItemId"): + sales_detail["ItemRef"] = {"value": item["ItemId"]} + if item.get("ItemName"): + sales_detail["ItemRef"]["name"] = item["ItemName"] + if item.get("Quantity"): + sales_detail["Qty"] = item["Quantity"] + if item.get("UnitPrice"): + sales_detail["UnitPrice"] = item["UnitPrice"] + if item.get("DiscountRate"): + sales_detail["DiscountRate"] = item["DiscountRate"] + if item.get("DiscountAmount"): + sales_detail["DiscountAmt"] = item["DiscountAmount"] + if item.get("ServiceDate"): + sales_detail["ServiceDate"] = item["ServiceDate"] + if item.get("TaxCodeId"): + sales_detail["TaxCodeRef"] = {"value": item["TaxCodeId"]} + if item.get("TaxCodeName"): + sales_detail["TaxCodeRef"]["name"] = item["TaxCodeName"] + + if sales_detail: + line["SalesItemLineDetail"] = sales_detail + + elif detail_type == "GroupLineDetail": + group_detail = {} + if item.get("GroupItemId"): + group_detail["GroupItemRef"] = { + "value": item["GroupItemId"]} + if item.get("GroupItemName"): + group_detail["GroupItemRef"]["name"] = item["GroupItemName"] + if item.get("GroupQuantity"): + group_detail["Quantity"] = item["GroupQuantity"] + + # Handle nested lines for GroupLine + if item.get("GroupLines") and isinstance(item["GroupLines"], list): + group_lines = [] + for group_item in item["GroupLines"]: + group_line = { + "Amount": group_item.get("Amount", 0), + "DetailType": "SalesItemLineDetail" + } + group_sales_detail = {} + if group_item.get("ItemId"): + group_sales_detail["ItemRef"] = { + "value": group_item["ItemId"]} + if group_item.get("Quantity"): + group_sales_detail["Qty"] = group_item["Quantity"] + if group_item.get("UnitPrice"): + group_sales_detail["UnitPrice"] = group_item["UnitPrice"] + if group_item.get("DiscountRate"): + group_sales_detail["DiscountRate"] = group_item["DiscountRate"] + if group_item.get("TaxCodeId"): + group_sales_detail["TaxCodeRef"] = { + "value": group_item["TaxCodeId"]} + + if group_sales_detail: + group_line["SalesItemLineDetail"] = group_sales_detail + group_lines.append(group_line) + + group_detail["Line"] = group_lines + + line["GroupLineDetail"] = group_detail + + elif detail_type == "DescriptionOnlyLine": + description_detail = {} + if item.get("ServiceDate"): + description_detail["ServiceDate"] = item["ServiceDate"] + if item.get("TaxCodeId"): + description_detail["TaxCodeRef"] = { + "value": item["TaxCodeId"]} + if item.get("TaxCodeName"): + description_detail["TaxCodeRef"]["name"] = item["TaxCodeName"] + + line["DescriptionLineDetail"] = description_detail + + elif detail_type == "DiscountLineDetail": + discount_detail = {} + if item.get("IsPercentBased") is not None: + discount_detail["PercentBased"] = item["IsPercentBased"] + if item.get("DiscountPercent"): + discount_detail["DiscountPercent"] = item["DiscountPercent"] + if item.get("DiscountAccountId"): + discount_detail["DiscountAccountRef"] = { + "value": item["DiscountAccountId"]} + if item.get("DiscountAccountName"): + discount_detail["DiscountAccountRef"]["name"] = item["DiscountAccountName"] + if item.get("TaxCodeId"): + discount_detail["TaxCodeRef"] = { + "value": item["TaxCodeId"], "name": item.get("TaxCodeName", "")} + + line["DiscountLineDetail"] = discount_detail + + elif detail_type == "SubTotalLineDetail": + subtotal_detail = {} + if item.get("ItemId"): + subtotal_detail["ItemRef"] = { + "value": item["ItemId"], "name": item.get("ItemName", "")} + + line["SubTotalLineDetail"] = subtotal_detail + + lines.append(line) + invoice_data['Line'] = lines + + # Reference objects - convert separate value/name fields to structured objects + ref_mappings = [ + ('CustomerRef', 'CustomerRefValue', 'CustomerRefName'), + ('CurrencyRef', 'CurrencyRefValue', 'CurrencyRefName'), + ('ShipMethodRef', 'ShipMethodRefValue', 'ShipMethodRefName'), + ('DepositToAccountRef', 'DepositToAccountRefValue', 'DepositToAccountRefName') + ] + + for ref_name, value_field, name_field in ref_mappings: + if value_field in kwargs: + ref_obj = {'value': kwargs[value_field]} + if name_field in kwargs: + ref_obj['name'] = kwargs[name_field] + invoice_data[ref_name] = ref_obj + + # Address fields - convert flattened fields to structured objects + for addr_type in ['BillAddr', 'ShipAddr', 'ShipFromAddr']: + address_fields = ['Line1', 'Line2', + 'City', 'CountrySubDivisionCode', 'PostalCode', 'Country'] + + has_address = any(kwargs.get(f'{addr_type}{field}') + for field in address_fields) + if has_address: + addr_obj = {} + for field in address_fields: + if kwargs.get(f'{addr_type}{field}'): + addr_obj[field] = kwargs[f'{addr_type}{field}'] + invoice_data[addr_type] = addr_obj + + return invoice_data + + +def invoice_data_to_mcp_object(invoice_data: Dict[str, Any]) -> Dict[str, Any]: + """ + Convert QuickBooks invoice data format to MCP object format. + This function flattens the nested QuickBooks structure to the flat format expected by MCP tools. + """ + mcp_object = {} + + # Copy basic fields if present + for field in [ + 'Id', 'DocNumber', 'ShipDate', 'DueDate', 'TrackingNum', 'Deposit' + ]: + if field in invoice_data: + mcp_object[field] = invoice_data[field] + + # Copy fields that are preserved in output only (not in input schema) + for field in [ + 'PrintStatus', 'EmailStatus', 'AllowOnlineACHPayment', 'AllowOnlineCreditCardPayment', + 'ApplyTaxAfterDiscount', 'ExchangeRate', 'GlobalTaxCalculation' + ]: + if field in invoice_data: + mcp_object[field] = invoice_data[field] + + # Handle renamed field: TxnDate -> TransactionDate + if 'TxnDate' in invoice_data: + mcp_object['TransactionDate'] = invoice_data['TxnDate'] + + # Handle fields that are output-only (not in input schema but preserved in output) + if 'PrivateNote' in invoice_data: + mcp_object['PrivateNote'] = invoice_data['PrivateNote'] + + # Handle CustomerMemo which might be in object format + if 'CustomerMemo' in invoice_data: + memo = invoice_data['CustomerMemo'] + if isinstance(memo, dict) and 'value' in memo: + mcp_object['CustomerMemo'] = memo['value'] + else: + mcp_object['CustomerMemo'] = memo + + # Handle read-only fields + for field in ['TotalAmt', 'Balance', 'InvoiceLink']: + if field in invoice_data: + mcp_object[field] = invoice_data[field] + + # Email addresses - flatten structured objects + for email_field in ['BillEmail', 'BillEmailCc', 'BillEmailBcc']: + if email_field in invoice_data: + addr = invoice_data[email_field] + if isinstance(addr, dict) and 'Address' in addr: + mcp_object[email_field] = addr['Address'] + + # Line items - flatten all line types to LineItems + if 'Line' in invoice_data and isinstance(invoice_data['Line'], list): + line_items = [] + for line in invoice_data['Line']: + if isinstance(line, dict) and 'Amount' in line: + item = { + 'Amount': line['Amount'], + 'Description': line.get('Description'), + 'LineId': line.get('Id'), + 'LineNum': line.get('LineNum'), + 'DetailType': line.get('DetailType', 'SalesItemLineDetail') + } + + detail_type = line.get('DetailType', 'SalesItemLineDetail') + + # Handle different detail types + if detail_type == 'SalesItemLineDetail' and 'SalesItemLineDetail' in line: + detail = line['SalesItemLineDetail'] + if 'ItemRef' in detail: + item['ItemId'] = detail['ItemRef'].get('value') + item['ItemName'] = detail['ItemRef'].get('name') + if 'Qty' in detail: + item['Quantity'] = detail['Qty'] + if 'UnitPrice' in detail: + item['UnitPrice'] = detail['UnitPrice'] + if 'DiscountRate' in detail: + item['DiscountRate'] = detail['DiscountRate'] + if 'DiscountAmt' in detail: + item['DiscountAmount'] = detail['DiscountAmt'] + if 'ServiceDate' in detail: + item['ServiceDate'] = detail['ServiceDate'] + if 'TaxCodeRef' in detail: + item['TaxCodeId'] = detail['TaxCodeRef'].get('value') + item['TaxCodeName'] = detail['TaxCodeRef'].get( + 'name') + + elif detail_type == 'GroupLineDetail' and 'GroupLineDetail' in line: + detail = line['GroupLineDetail'] + if 'GroupItemRef' in detail: + item['GroupItemId'] = detail['GroupItemRef'].get( + 'value') + item['GroupItemName'] = detail['GroupItemRef'].get( + 'name') + if 'Quantity' in detail: + item['GroupQuantity'] = detail['Quantity'] + + # Handle nested group lines + if 'Line' in detail and isinstance(detail['Line'], list): + group_lines = [] + for group_line in detail['Line']: + if isinstance(group_line, dict) and 'SalesItemLineDetail' in group_line: + group_detail = group_line['SalesItemLineDetail'] + group_item = { + 'ItemId': group_detail.get('ItemRef', {}).get('value'), + 'Quantity': group_detail.get('Qty'), + 'UnitPrice': group_detail.get('UnitPrice'), + 'DiscountRate': group_detail.get('DiscountRate'), + 'TaxCodeId': group_detail.get('TaxCodeRef', {}).get('value') + } + group_lines.append(group_item) + item['GroupLines'] = group_lines + + elif detail_type == 'DescriptionOnlyLine' and 'DescriptionLineDetail' in line: + detail = line['DescriptionLineDetail'] + if 'ServiceDate' in detail: + item['ServiceDate'] = detail['ServiceDate'] + if 'TaxCodeRef' in detail: + item['TaxCodeId'] = detail['TaxCodeRef'].get('value') + item['TaxCodeName'] = detail['TaxCodeRef'].get( + 'name') + # Check if this is a subtotal line + description = line.get('Description', '') + if description and description.startswith('Subtotal:'): + item['IsSubtotal'] = True + + elif detail_type == 'DiscountLineDetail' and 'DiscountLineDetail' in line: + detail = line['DiscountLineDetail'] + if 'DiscountPercent' in detail: + item['DiscountPercent'] = detail['DiscountPercent'] + if 'PercentBased' in detail: + item['IsPercentBased'] = detail['PercentBased'] + if 'DiscountAccountRef' in detail: + item['DiscountAccountId'] = detail['DiscountAccountRef'].get( + 'value') + item['DiscountAccountName'] = detail['DiscountAccountRef'].get( + 'name') + if 'TaxCodeRef' in detail: + item['TaxCodeId'] = detail['TaxCodeRef'].get('value') + item['TaxCodeName'] = detail['TaxCodeRef'].get( + 'name') + + elif detail_type == 'SubTotalLineDetail' and 'SubTotalLineDetail' in line: + detail = line['SubTotalLineDetail'] + if 'ItemRef' in detail: + item['ItemId'] = detail['ItemRef'].get('value') + item['ItemName'] = detail['ItemRef'].get('name') + + line_items.append(item) + mcp_object['LineItems'] = line_items + + # Reference objects - flatten to separate value and name fields + ref_mappings = [ + ('CustomerRef', 'CustomerRefValue', 'CustomerRefName'), + ('CurrencyRef', 'CurrencyRefValue', 'CurrencyRefName'), + ('SalesTermRef', 'SalesTermRefValue', 'SalesTermRefName'), + ('DepartmentRef', 'DepartmentRefValue', 'DepartmentRefName'), + ('ShipMethodRef', 'ShipMethodRefValue', 'ShipMethodRefName'), + ('DepositToAccountRef', 'DepositToAccountRefValue', 'DepositToAccountRefName'), + ('RecurDataRef', 'RecurDataRefValue', 'RecurDataRefName'), + ('SyncToken', 'SyncToken', None) + ] + + for ref_name, value_field, name_field in ref_mappings: + if ref_name in invoice_data: + ref = invoice_data[ref_name] + if isinstance(ref, dict): + if 'value' in ref: + mcp_object[value_field] = ref['value'] + if name_field and 'name' in ref: + mcp_object[name_field] = ref['name'] + else: + # Handle cases where SyncToken might be directly in invoice_data + mcp_object[value_field] = ref + + # Address fields - flatten structured objects + address_mappings = [ + ('BillAddr', 'BillAddr'), + ('ShipAddr', 'ShipAddr'), + ('ShipFromAddr', 'ShipFromAddr') + ] + + for addr_type, prefix in address_mappings: + if addr_type in invoice_data and isinstance(invoice_data[addr_type], dict): + addr = invoice_data[addr_type] + for field in ['Line1', 'Line2', 'City', 'CountrySubDivisionCode', 'PostalCode', 'Country']: + if field in addr: + mcp_object[f'{prefix}{field}'] = addr[field] + + return mcp_object + + +class InvoiceManager: + def __init__(self, client: QuickBooksHTTPClient): + self.client = client + + async def create_invoice(self, **kwargs) -> Dict[str, Any]: + """Create a new invoice with comprehensive property support.""" + invoice_data = mcp_object_to_invoice_data(**kwargs) + + # Ensure CustomerRef is included + if 'CustomerRef' not in invoice_data and 'CustomerRefValue' in kwargs: + invoice_data['CustomerRef'] = {'value': kwargs['CustomerRefValue']} + if 'CustomerRefName' in kwargs: + invoice_data['CustomerRef']['name'] = kwargs['CustomerRefName'] + + response = await self.client._post('invoice', invoice_data) + return invoice_data_to_mcp_object(response['Invoice']) + + async def get_invoice(self, Id: str) -> Dict[str, Any]: + """Get a specific invoice by ID.""" + response = await self.client._get(f"invoice/{Id}") + return invoice_data_to_mcp_object(response['Invoice']) + + async def list_invoices(self, MaxResults: int = 100, StartPosition: int = 1) -> List[Dict[str, Any]]: + """List all invoices with comprehensive properties and pagination support.""" + query = f"select * from Invoice STARTPOSITION {StartPosition} MAXRESULTS {MaxResults}" + response = await self.client._get('query', params={'query': query}) + + # Handle case when no invoices are returned + if 'Invoice' not in response['QueryResponse']: + return [] + + invoices = response['QueryResponse']['Invoice'] + return [invoice_data_to_mcp_object(invoice) for invoice in invoices] + + async def search_invoices(self, **kwargs) -> List[Dict[str, Any]]: + """ + Search invoices with various filters and pagination support. + + Args: + DocNumber: Search by document/invoice number + CustomerRefValue: Search by customer ID + CustomerName: Search by customer name (partial match) + + # Date filters + TransactionDateFrom/TransactionDateTo: Search by transaction date range + DueDateFrom/DueDateTo: Search by due date range + ShipDateFrom/ShipDateTo: Search by ship date range + + # Amount filters + MinAmount/MaxAmount: Search by total amount range + MinBalance/MaxBalance: Search by balance amount range + + # Address filters (billing, shipping, ship-from) + BillAddrCity, ShipAddrCity, ShipFromAddrCity: Search by city + BillAddrState, ShipAddrState, ShipFromAddrState: Search by state/province + BillAddrPostalCode, ShipAddrPostalCode, ShipFromAddrPostalCode: Search by postal code + BillAddrCountry, ShipAddrCountry, ShipFromAddrCountry: Search by country + BillAddrLine1, ShipAddrLine1, ShipFromAddrLine1: Search by address line 1 (partial match) + + MaxResults: Maximum number of results to return (default: 100) + StartPosition: Starting position for pagination (default: 1) + + Returns: + List of invoices matching the search criteria + """ + # Build WHERE clause conditions + conditions = [] + + # Basic filters + if kwargs.get('DocNumber'): + conditions.append(f"DocNumber = '{kwargs['DocNumber']}'") + + if kwargs.get('CustomerRefValue'): + conditions.append(f"CustomerRef = '{kwargs['CustomerRefValue']}'") + + if kwargs.get('CustomerName'): + # For customer name search, we need to use LIKE operator + customer_name = kwargs['CustomerName'].replace( + "'", "''") # Escape single quotes + conditions.append( + f"CustomerRef IN (SELECT Id FROM Customer WHERE Name LIKE '%{customer_name}%')") + + # Date range filters + if kwargs.get('TransactionDateFrom'): + conditions.append(f"TxnDate >= '{kwargs['TransactionDateFrom']}'") + if kwargs.get('TransactionDateTo'): + conditions.append(f"TxnDate <= '{kwargs['TransactionDateTo']}'") + + if kwargs.get('DueDateFrom'): + conditions.append(f"DueDate >= '{kwargs['DueDateFrom']}'") + if kwargs.get('DueDateTo'): + conditions.append(f"DueDate <= '{kwargs['DueDateTo']}'") + + if kwargs.get('ShipDateFrom'): + conditions.append(f"ShipDate >= '{kwargs['ShipDateFrom']}'") + if kwargs.get('ShipDateTo'): + conditions.append(f"ShipDate <= '{kwargs['ShipDateTo']}'") + + # Amount range filters + if kwargs.get('MinAmount'): + conditions.append(f"TotalAmt >= {kwargs['MinAmount']}") + if kwargs.get('MaxAmount'): + conditions.append(f"TotalAmt <= {kwargs['MaxAmount']}") + + if kwargs.get('MinBalance'): + conditions.append(f"Balance >= {kwargs['MinBalance']}") + if kwargs.get('MaxBalance'): + conditions.append(f"Balance <= {kwargs['MaxBalance']}") + + # Address filters - Note: QuickBooks API has limited support for nested object queries + # For address searches, we'll need to use more complex queries or post-filter results + address_filters = {} + + # Billing Address filters + for field in ['BillAddrCity', 'BillAddrState', 'BillAddrPostalCode', 'BillAddrCountry']: + if kwargs.get(field): + # Map the field name to QB API format + qb_field = field.replace('BillAddr', 'BillAddr.').replace( + 'State', 'CountrySubDivisionCode') + if field.endswith('Line1'): + # For partial match on address lines, we'll post-filter + address_filters[field] = kwargs[field] + else: + conditions.append(f"{qb_field} = '{kwargs[field]}'") + + if kwargs.get('BillAddrLine1'): + address_filters['BillAddrLine1'] = kwargs['BillAddrLine1'] + + # Shipping Address filters + for field in ['ShipAddrCity', 'ShipAddrState', 'ShipAddrPostalCode', 'ShipAddrCountry']: + if kwargs.get(field): + qb_field = field.replace('ShipAddr', 'ShipAddr.').replace( + 'State', 'CountrySubDivisionCode') + if field.endswith('Line1'): + address_filters[field] = kwargs[field] + else: + conditions.append(f"{qb_field} = '{kwargs[field]}'") + + if kwargs.get('ShipAddrLine1'): + address_filters['ShipAddrLine1'] = kwargs['ShipAddrLine1'] + + # Ship From Address filters + for field in ['ShipFromAddrCity', 'ShipFromAddrState', 'ShipFromAddrPostalCode', 'ShipFromAddrCountry']: + if kwargs.get(field): + qb_field = field.replace('ShipFromAddr', 'ShipFromAddr.').replace( + 'State', 'CountrySubDivisionCode') + if field.endswith('Line1'): + address_filters[field] = kwargs[field] + else: + conditions.append(f"{qb_field} = '{kwargs[field]}'") + + if kwargs.get('ShipFromAddrLine1'): + address_filters['ShipFromAddrLine1'] = kwargs['ShipFromAddrLine1'] + + # Build the complete query + base_query = "SELECT * FROM Invoice" + + if conditions: + where_clause = " WHERE " + " AND ".join(conditions) + base_query += where_clause + + # Add pagination + start_position = kwargs.get('StartPosition', 1) + max_results = kwargs.get('MaxResults', 100) + + query = f"{base_query} STARTPOSITION {start_position} MAXRESULTS {max_results}" + + response = await self.client._get('query', params={'query': query}) + + # Handle case when no invoices are returned + if 'Invoice' not in response['QueryResponse']: + return [] + + invoices = response['QueryResponse']['Invoice'] + results = [invoice_data_to_mcp_object(invoice) for invoice in invoices] + + # Post-filter for address line fields (partial matching) + if address_filters: + filtered_results = [] + for invoice in results: + match = True + + # Check BillAddrLine1 + if address_filters.get('BillAddrLine1'): + bill_line1 = invoice.get('BillAddrLine1', '').lower() + search_term = address_filters['BillAddrLine1'].lower() + if search_term not in bill_line1: + match = False + + # Check ShipAddrLine1 + if address_filters.get('ShipAddrLine1'): + ship_line1 = invoice.get('ShipAddrLine1', '').lower() + search_term = address_filters['ShipAddrLine1'].lower() + if search_term not in ship_line1: + match = False + + # Check ShipFromAddrLine1 + if address_filters.get('ShipFromAddrLine1'): + ship_from_line1 = invoice.get( + 'ShipFromAddrLine1', '').lower() + search_term = address_filters['ShipFromAddrLine1'].lower() + if search_term not in ship_from_line1: + match = False + + if match: + filtered_results.append(invoice) + + return filtered_results + + return results + + async def update_invoice(self, **kwargs) -> Dict[str, Any]: + """Update an existing invoice with comprehensive property support.""" + Id = kwargs.get('Id') + if not Id: + raise ValueError("Id is required for updating an invoice") + + # Auto-fetch current sync token + current_invoice_response = await self.client._get(f"invoice/{Id}") + sync_token = current_invoice_response.get( + 'Invoice', {}).get('SyncToken', '0') + + invoice_data = mcp_object_to_invoice_data(**kwargs) + invoice_data.update({ + "Id": Id, + "SyncToken": sync_token, + "sparse": True, + }) + + response = await self.client._post('invoice', invoice_data) + return invoice_data_to_mcp_object(response['Invoice']) + + async def delete_invoice(self, Id: str) -> Dict[str, Any]: + """Delete an invoice.""" + # Auto-fetch current sync token + current_invoice_response = await self.client._get(f"invoice/{Id}") + current_invoice = current_invoice_response.get('Invoice', {}) + + if not current_invoice: + raise ValueError(f"Invoice with ID {Id} not found") + + sync_token = current_invoice.get('SyncToken', '0') + + # For delete operation, wrap in Invoice object + delete_data = { + "Id": Id, + "SyncToken": sync_token, + } + return await self.client._post("invoice", delete_data, params={'operation': 'delete'}) + + async def send_invoice(self, Id: str, SendTo: str = None) -> Dict[str, Any]: + """ + Send an invoice via email with delivery info and email status updates. + + The Invoice.EmailStatus parameter is set to EmailSent. + The Invoice.DeliveryInfo element is populated with sending information. + The Invoice.BillEmail.Address parameter is updated to the address specified + with the value of the sendTo query parameter, if specified. + + Args: + Id: The QuickBooks invoice ID to send + sendTo: Optional email address to send the invoice to. If not provided, + uses the invoice's BillEmail address. + + Returns: + The invoice response body with updated email status and delivery info. + """ + # Construct the endpoint URL + endpoint = f"invoice/{Id}/send" + + # Build query parameters + params = {} + if SendTo: + params['sendTo'] = SendTo + + # Send request with POST method (empty body as per API spec) + response = await self.client._make_request('POST', endpoint, params=params) + + # The response should contain the updated invoice data + if 'Invoice' in response: + return invoice_data_to_mcp_object(response['Invoice']) + + return response + + async def void_invoice(self, Id: str) -> Dict[str, Any]: + """ + Void an existing invoice in QuickBooks. + + The transaction remains active but all amounts and quantities are zeroed + and the string "Voided" is injected into Invoice.PrivateNote, prepended + to existing text if present. + + Args: + Id: The QuickBooks invoice ID to void + + Returns: + The invoice response body with voided status. + """ + # Auto-fetch current sync token + current_invoice_response = await self.client._get(f"invoice/{Id}") + current_invoice = current_invoice_response.get('Invoice', {}) + + if not current_invoice: + raise ValueError(f"Invoice with ID {Id} not found") + + sync_token = current_invoice.get('SyncToken', '0') + + # For void operation, wrap in Invoice object + void_data = { + "Id": Id, + "SyncToken": sync_token, + } + + response = await self.client._post("invoice", void_data, params={'operation': 'void'}) + + # The response should contain the voided invoice data + if 'Invoice' in response: + return invoice_data_to_mcp_object(response['Invoice']) + + return response + + +# Export tools +tools = [create_invoice_tool, get_invoice_tool, list_invoices_tool, search_invoices_tool, + update_invoice_tool, delete_invoice_tool, send_invoice_tool, void_invoice_tool] diff --git a/mcp_servers/quickbooks/tools/payments.py b/mcp_servers/quickbooks/tools/payments.py new file mode 100644 index 00000000..1d7fd448 --- /dev/null +++ b/mcp_servers/quickbooks/tools/payments.py @@ -0,0 +1,703 @@ +from typing import Any, Dict, List + +from mcp.types import Tool +import mcp.types as types +from .http_client import QuickBooksHTTPClient + +# Minimal properties for payment creation (required by QuickBooks) +payment_properties_minimal = { + "TotalAmt": { + "type": "number", + "description": "Indicates the total amount of the transaction. This includes the total of all the charges, allowances, and taxes." + }, + "CustomerRefValue": { + "type": "string", + "description": "Customer ID for the payment" + }, + "CustomerRefName": { + "type": "string", + "description": "Name of the customer associated with the payment" + } +} + +# Payment properties mapping (based on QuickBooks API documentation) +payment_properties_user_define = { + **payment_properties_minimal, + "TransactionDate": { + "type": "string", + "description": "The date entered by the user when this transaction occurred. For posting transactions, this is the posting date that affects the financial statements. If the date is not supplied, the current date on the server is used. Format: yyyy/MM/dd" + }, + "PaymentRefNum": { + "type": "string", + "description": "The reference number for the payment received. For example, Check # for a check, envelope # for a cash donation. Required for France locales." + }, + "PaymentMethodRefValue": { + "type": "string", + "description": "Reference to a PaymentMethod associated with this transaction" + }, + "PaymentMethodRefName": { + "type": "string", + "description": "Name of the PaymentMethod associated with this transaction" + }, + "DepositToAccountRefValue": { + "type": "string", + "description": "Account to which money is deposited. If you do not specify this account, payment is applied to the Undeposited Funds account." + }, + "DepositToAccountRefName": { + "type": "string", + "description": "Name of the account to which money is deposited" + }, + "CurrencyRefValue": { + "type": "string", + "description": "A three letter string representing the ISO 4217 code for the currency. For example, USD, AUD, EUR, and so on." + }, + "CurrencyRefName": { + "type": "string", + "description": "The full name of the currency" + }, + "ExchangeRate": { + "type": "number", + "description": "The number of home currency units it takes to equal one unit of currency specified by CurrencyRef. Applicable if multicurrency is enabled for the company" + }, + # Credit Card Payment fields - flattened structure with parent node keywords + # CreditChargeInfo fields + "CreditCardPaymentCcExpiryMonth": { + "type": "integer", + "description": "Expiration Month on card, expressed as a number: 1=January, 2=February, etc." + }, + "CreditCardPaymentCcExpiryYear": { + "type": "integer", + "description": "Expiration Year on card, expressed as a 4 digit number 1999, 2003, etc." + }, + "CreditCardPaymentProcessPayment": { + "type": "boolean", + "description": "false or no value-Store credit card information only. true-Store credit card payment transaction information in CreditChargeResponse" + }, + "CreditCardPaymentPostalCode": { + "type": "string", + "description": "Credit card holder billing postal code. Five digits in the USA. Max 30 characters" + }, + "CreditCardPaymentAmount": { + "type": "number", + "description": "The amount processed using the credit card" + }, + "CreditCardPaymentNameOnAcct": { + "type": "string", + "description": "Account holder name, as printed on the card" + }, + "CreditCardPaymentType": { + "type": "string", + "description": "Type of credit card. For example, MasterCard, Visa, Discover, American Express, and so on" + }, + "CreditCardPaymentBillAddrStreet": { + "type": "string", + "description": "Credit card holder billing address of record: the street address to which credit card statements are sent. Max 255 characters" + }, + # CreditChargeResponse fields + "CreditCardPaymentStatus": { + "type": "string", + "description": "Indicates the status of the payment transaction. Possible values include Completed, Unknown." + }, + "CreditCardPaymentAuthCode": { + "type": "string", + "description": "Code returned from the credit card processor to indicate that the charge will be paid by the card issuer. Max 100 characters" + }, + "CreditCardPaymentTransactionAuthorizationTime": { + "type": "string", + "description": "Timestamp indicating the time in which the card processor authorized the transaction. Format: YYYY-MM-DDTHH:MM:SS (Local time zone: YYYY-MM-DDTHH:MM:SS UTC: YYYY-MM-DDT HH:MM:SSZ Specific time zone: YYYY-MM-DDT HH:MM:SS +/- HH:MM)" + }, + "CreditCardPaymentCCTransId": { + "type": "string", + "description": "Unique identifier of the payment transaction. It can be used to track the status of transactions, or to search transactions. Max 100 characters" + } +} + +payment_properties = { + **payment_properties_user_define, + "Id": { + "type": "string", + "description": "The unique QuickBooks payment ID" + } +} + +# MCP Tool definitions +create_payment_tool = Tool( + name="quickbooks_create_payment", + title="Create Payment", + description="Create New Payment - Create a new payment in QuickBooks. Requires TotalAmt and CustomerRef. Can be applied to specific invoices/credit memos or created as unapplied credit.", + inputSchema={ + "type": "object", + "properties": payment_properties_minimal, + "required": ["TotalAmt", "CustomerRefValue"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_PAYMENT"}) +) + +get_payment_tool = Tool( + name="quickbooks_get_payment", + title="Get Payment", + description="Get Single Payment - Retrieve a specific payment by ID from QuickBooks with all its details including line items, amounts, and linked transactions", + inputSchema={ + "type": "object", + "properties": { + "Id": {"type": "string", "description": "The QuickBooks payment ID"} + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_PAYMENT", "readOnlyHint": True}) +) + +list_payments_tool = Tool( + name="quickbooks_list_payments", + title="List Payments", + description="List All Payments - Retrieve all payments from QuickBooks with pagination support. Use for browsing or getting overview of payments", + inputSchema={ + "type": "object", + "properties": { + "MaxResults": {"type": "integer", "description": "Maximum number of results to return", "default": 100}, + "StartPosition": {"type": "integer", "description": "Starting position for pagination (1-based)", "default": 1}, + }, + "required": [], + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_PAYMENT", "readOnlyHint": True}) +) + +search_payments_tool = Tool( + name="quickbooks_search_payments", + title="Search Payments", + description="Advanced Payment Search - Search payments with powerful filters including dates, amounts, customer info, and status. Perfect for finding specific payments based on criteria", + inputSchema={ + "type": "object", + "properties": { + "CustomerRefValue": {"type": "string", "description": "Search by customer ID"}, + "CustomerName": {"type": "string", "description": "Search by customer name (partial match)"}, + "PaymentRefNum": {"type": "string", "description": "Search by payment reference number"}, + + # Date filters + "TransactionDateFrom": {"type": "string", "description": "Search payments from this transaction date (YYYY-MM-DD format)"}, + "TransactionDateTo": {"type": "string", "description": "Search payments to this transaction date (YYYY-MM-DD format)"}, + + # Amount filters + "MinAmount": {"type": "number", "description": "Minimum total amount"}, + "MaxAmount": {"type": "number", "description": "Maximum total amount"}, + "MinUnappliedAmt": {"type": "number", "description": "Minimum unapplied amount"}, + "MaxUnappliedAmt": {"type": "number", "description": "Maximum unapplied amount"}, + + # Reference filters + "PaymentMethodRefValue": {"type": "string", "description": "Search by payment method ID"}, + "DepositToAccountRefValue": {"type": "string", "description": "Search by deposit account ID"}, + + # Pagination + "MaxResults": {"type": "integer", "description": "Maximum number of results to return", "default": 100}, + "StartPosition": {"type": "integer", "description": "Starting position for pagination (1-based)", "default": 1} + }, + "required": [], + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_PAYMENT", "readOnlyHint": True}) +) + +update_payment_tool = Tool( + name="quickbooks_update_payment", + title="Update Payment", + description="Update Existing Payment - Modify an existing payment in QuickBooks. Automatically handles sync tokens for safe concurrent updates", + inputSchema={ + "type": "object", + "properties": payment_properties, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_PAYMENT"}) +) + +delete_payment_tool = Tool( + name="quickbooks_delete_payment", + title="Delete Payment", + description="ļøDelete Payment - Permanently delete a payment from QuickBooks. Use with caution as this action cannot be undone", + inputSchema={ + "type": "object", + "properties": { + "Id": {"type": "string", "description": "The QuickBooks payment ID to delete"} + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_PAYMENT"}) +) + +send_payment_tool = Tool( + name="quickbooks_send_payment", + title="Send Payment", + description="Send Payment via Email - Send a payment receipt to customer via email", + inputSchema={ + "type": "object", + "properties": { + "Id": { + "type": "string", + "description": "The QuickBooks payment ID to send" + }, + "SendTo": { + "type": "string", + "description": "Email address to send the payment receipt to", + } + }, + "required": ["Id", "SendTo"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_PAYMENT"}) +) + +void_payment_tool = Tool( + name="quickbooks_void_payment", + title="Void Payment", + description="Void Payment - Void an existing payment in QuickBooks. Sets all amounts to zero and marks as 'Voided' while keeping the record for audit trail. If funds have been deposited, you must delete the associated deposit object before voiding the payment.", + inputSchema={ + "type": "object", + "properties": { + "Id": { + "type": "string", + "description": "The QuickBooks payment ID to void" + } + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_PAYMENT"}) +) + + +def mcp_object_to_payment_data(**kwargs) -> Dict[str, Any]: + """ + Convert MCP object format to QuickBooks payment data format. + This function transforms the flat MCP structure to the nested format expected by QuickBooks API. + """ + payment_data = {} + + # Basic payment information - direct copy + for field in ['TotalAmt', 'PaymentRefNum', 'ExchangeRate']: + if field in kwargs: + payment_data[field] = kwargs[field] + + # Handle renamed field: TransactionDate -> TxnDate + if 'TransactionDate' in kwargs: + payment_data['TxnDate'] = kwargs['TransactionDate'] + + # Reference objects - convert separate value/name fields to structured objects + ref_mappings = [ + ('CustomerRef', 'CustomerRefValue', 'CustomerRefName'), + ('CurrencyRef', 'CurrencyRefValue', 'CurrencyRefName'), + ('PaymentMethodRef', 'PaymentMethodRefValue', 'PaymentMethodRefName'), + ('DepositToAccountRef', 'DepositToAccountRefValue', 'DepositToAccountRefName'), + ] + + for ref_name, value_field, name_field in ref_mappings: + if value_field in kwargs: + ref_obj = {'value': kwargs[value_field]} + if name_field in kwargs: + ref_obj['name'] = kwargs[name_field] + payment_data[ref_name] = ref_obj + + # Credit Card Payment information - reconstruct nested structure from flattened fields + credit_card_fields = { + 'CreditCardPaymentCcExpiryMonth': 'CcExpiryMonth', + 'CreditCardPaymentCcExpiryYear': 'CcExpiryYear', + 'CreditCardPaymentProcessPayment': 'ProcessPayment', + 'CreditCardPaymentPostalCode': 'PostalCode', + 'CreditCardPaymentAmount': 'Amount', + 'CreditCardPaymentNameOnAcct': 'NameOnAcct', + 'CreditCardPaymentType': 'Type', + 'CreditCardPaymentBillAddrStreet': 'BillAddrStreet' + } + + credit_charge_response_fields = { + 'CreditCardPaymentStatus': 'Status', + 'CreditCardPaymentAuthCode': 'AuthCode', + 'CreditCardPaymentTransactionAuthorizationTime': ('TxnAuthorizationTime', 'dateTime'), + 'CreditCardPaymentCCTransId': 'CCTransId' + } + + # Build CreditChargeInfo if any credit card fields are present + credit_charge_info = {} + for mcp_field, qb_field in credit_card_fields.items(): + if mcp_field in kwargs: + credit_charge_info[qb_field] = kwargs[mcp_field] + + # Build CreditChargeResponse if any response fields are present + credit_charge_response = {} + for mcp_field, qb_field in credit_charge_response_fields.items(): + if mcp_field in kwargs: + if isinstance(qb_field, tuple): + # Handle nested fields like TxnAuthorizationTime.dateTime + parent_field, child_field = qb_field + if parent_field not in credit_charge_response: + credit_charge_response[parent_field] = {} + credit_charge_response[parent_field][child_field] = kwargs[mcp_field] + else: + credit_charge_response[qb_field] = kwargs[mcp_field] + + # Construct CreditCardPayment object if we have any credit card data + if credit_charge_info or credit_charge_response: + cc_payment = {} + if credit_charge_info: + cc_payment['CreditChargeInfo'] = credit_charge_info + if credit_charge_response: + cc_payment['CreditChargeResponse'] = credit_charge_response + payment_data['CreditCardPayment'] = cc_payment + + return payment_data + + +def payment_data_to_mcp_object(payment_data: Dict[str, Any]) -> Dict[str, Any]: + """ + Convert QuickBooks payment data format to MCP object format. + This function flattens the nested QuickBooks structure to the flat format expected by MCP tools. + """ + mcp_object = {} + + # Copy basic fields if present + for field in [ + 'Id', 'TotalAmt', 'PaymentRefNum', + 'ExchangeRate', 'UnappliedAmt' + ]: + if field in payment_data: + mcp_object[field] = payment_data[field] + + # Handle fields that are output-only (not in input schema but preserved in output) + for field in ['PrivateNote', 'TransactionLocationType']: + if field in payment_data: + mcp_object[field] = payment_data[field] + + # Handle renamed field: TxnDate -> TransactionDate + if 'TxnDate' in payment_data: + mcp_object['TransactionDate'] = payment_data['TxnDate'] + + # Reference objects - flatten to separate value and name fields + ref_mappings = [ + ('CustomerRef', 'CustomerRefValue', 'CustomerRefName'), + ('CurrencyRef', 'CurrencyRefValue', 'CurrencyRefName'), + ('PaymentMethodRef', 'PaymentMethodRefValue', 'PaymentMethodRefName'), + ('DepositToAccountRef', 'DepositToAccountRefValue', 'DepositToAccountRefName'), + ('ProjectRef', 'ProjectRefValue', 'ProjectRefName'), + ('TaxExemptionRef', 'TaxExemptionRefValue', 'TaxExemptionRefName'), + ('SyncToken', 'SyncToken', None) + ] + + for ref_name, value_field, name_field in ref_mappings: + if ref_name in payment_data: + ref = payment_data[ref_name] + if isinstance(ref, dict): + if 'value' in ref: + mcp_object[value_field] = ref['value'] + if name_field and 'name' in ref: + mcp_object[name_field] = ref['name'] + else: + # Handle cases where SyncToken might be directly in payment_data + mcp_object[value_field] = ref + + # Line items - flatten to Line array + if 'Line' in payment_data and isinstance(payment_data['Line'], list): + lines = [] + for line in payment_data['Line']: + if isinstance(line, dict): + line_item = {} + if 'Amount' in line: + line_item['Amount'] = line['Amount'] + + if 'LinkedTxn' in line and isinstance(line['LinkedTxn'], list): + linked_txns = [] + for linked_txn in line['LinkedTxn']: + if isinstance(linked_txn, dict): + txn = {} + if 'TxnId' in linked_txn: + txn['TxnId'] = linked_txn['TxnId'] + if 'TxnType' in linked_txn: + txn['TxnType'] = linked_txn['TxnType'] + if 'TxnLineId' in linked_txn: + txn['TxnLineId'] = linked_txn['TxnLineId'] + linked_txns.append(txn) + line_item['LinkedTxn'] = linked_txns + + lines.append(line_item) + mcp_object['Line'] = lines + + # Credit Card Payment information - flatten nested structure + if 'CreditCardPayment' in payment_data: + cc_payment = payment_data['CreditCardPayment'] + if isinstance(cc_payment, dict): + # Flatten CreditChargeInfo fields + if 'CreditChargeInfo' in cc_payment: + charge_info = cc_payment['CreditChargeInfo'] + if isinstance(charge_info, dict): + charge_info_mapping = { + 'CcExpiryMonth': 'CreditCardPaymentCcExpiryMonth', + 'CcExpiryYear': 'CreditCardPaymentCcExpiryYear', + 'ProcessPayment': 'CreditCardPaymentProcessPayment', + 'PostalCode': 'CreditCardPaymentPostalCode', + 'Amount': 'CreditCardPaymentAmount', + 'NameOnAcct': 'CreditCardPaymentNameOnAcct', + 'Type': 'CreditCardPaymentType', + 'BillAddrStreet': 'CreditCardPaymentBillAddrStreet' + } + for qb_field, mcp_field in charge_info_mapping.items(): + if qb_field in charge_info: + mcp_object[mcp_field] = charge_info[qb_field] + + # Flatten CreditChargeResponse fields + if 'CreditChargeResponse' in cc_payment: + charge_response = cc_payment['CreditChargeResponse'] + if isinstance(charge_response, dict): + response_mapping = { + 'Status': 'CreditCardPaymentStatus', + 'AuthCode': 'CreditCardPaymentAuthCode', + 'CCTransId': 'CreditCardPaymentCCTransId' + } + for qb_field, mcp_field in response_mapping.items(): + if qb_field in charge_response: + mcp_object[mcp_field] = charge_response[qb_field] + + # Handle nested TxnAuthorizationTime.dateTime + if 'TxnAuthorizationTime' in charge_response: + txn_auth_time = charge_response['TxnAuthorizationTime'] + if isinstance(txn_auth_time, dict) and 'dateTime' in txn_auth_time: + mcp_object['CreditCardPaymentTransactionAuthorizationTime'] = txn_auth_time['dateTime'] + + # MetaData fields + if 'MetaData' in payment_data and isinstance(payment_data['MetaData'], dict): + metadata = payment_data['MetaData'] + if 'CreateTime' in metadata: + mcp_object['MetaDataCreateTime'] = metadata['CreateTime'] + if 'LastUpdatedTime' in metadata: + mcp_object['MetaDataLastUpdatedTime'] = metadata['LastUpdatedTime'] + + return mcp_object + + +class PaymentManager: + def __init__(self, client: QuickBooksHTTPClient): + self.client = client + + async def create_payment(self, **kwargs) -> Dict[str, Any]: + """Create a new payment with comprehensive property support.""" + payment_data = mcp_object_to_payment_data(**kwargs) + + # Ensure CustomerRef is included + if 'CustomerRef' not in payment_data and 'CustomerRefValue' in kwargs: + payment_data['CustomerRef'] = {'value': kwargs['CustomerRefValue']} + if 'CustomerRefName' in kwargs: + payment_data['CustomerRef']['name'] = kwargs['CustomerRefName'] + + response = await self.client._post('payment', payment_data) + return payment_data_to_mcp_object(response['Payment']) + + async def get_payment(self, Id: str) -> Dict[str, Any]: + """Get a specific payment by ID.""" + response = await self.client._get(f"payment/{Id}") + return payment_data_to_mcp_object(response['Payment']) + + async def list_payments(self, MaxResults: int = 100, StartPosition: int = 1) -> List[Dict[str, Any]]: + """List all payments with comprehensive properties and pagination support.""" + query = f"select * from Payment STARTPOSITION {StartPosition} MAXRESULTS {MaxResults}" + response = await self.client._get('query', params={'query': query}) + + # Handle case when no payments are returned + if 'Payment' not in response['QueryResponse']: + return [] + + payments = response['QueryResponse']['Payment'] + return [payment_data_to_mcp_object(payment) for payment in payments] + + async def search_payments(self, **kwargs) -> List[Dict[str, Any]]: + """ + Search payments with various filters and pagination support. + + Args: + CustomerRefValue: Search by customer ID + CustomerName: Search by customer name (partial match) + PaymentRefNum: Search by payment reference number + + # Date filters + TransactionDateFrom/TransactionDateTo: Search by transaction date range + + # Amount filters + MinAmount/MaxAmount: Search by total amount range + MinUnappliedAmt/MaxUnappliedAmt: Search by unapplied amount range + + # Reference filters + PaymentMethodRefValue: Search by payment method ID + DepositToAccountRefValue: Search by deposit account ID + + MaxResults: Maximum number of results to return (default: 100) + StartPosition: Starting position for pagination (default: 1) + + Returns: + List of payments matching the search criteria + """ + # Build WHERE clause conditions + conditions = [] + + # Basic filters + if kwargs.get('CustomerRefValue'): + conditions.append(f"CustomerRef = '{kwargs['CustomerRefValue']}'") + + if kwargs.get('PaymentRefNum'): + conditions.append(f"PaymentRefNum = '{kwargs['PaymentRefNum']}'") + + if kwargs.get('CustomerName'): + # For customer name search, we need to use a subquery + customer_name = kwargs['CustomerName'].replace( + "'", "''") # Escape single quotes + conditions.append( + f"CustomerRef IN (SELECT Id FROM Customer WHERE Name LIKE '%{customer_name}%')") + + # Date range filters + if kwargs.get('TransactionDateFrom'): + conditions.append(f"TxnDate >= '{kwargs['TransactionDateFrom']}'") + if kwargs.get('TransactionDateTo'): + conditions.append(f"TxnDate <= '{kwargs['TransactionDateTo']}'") + + # Amount range filters + if kwargs.get('MinAmount'): + conditions.append(f"TotalAmt >= {kwargs['MinAmount']}") + if kwargs.get('MaxAmount'): + conditions.append(f"TotalAmt <= {kwargs['MaxAmount']}") + + if kwargs.get('MinUnappliedAmt'): + conditions.append(f"UnappliedAmt >= {kwargs['MinUnappliedAmt']}") + if kwargs.get('MaxUnappliedAmt'): + conditions.append(f"UnappliedAmt <= {kwargs['MaxUnappliedAmt']}") + + # Reference filters + if kwargs.get('PaymentMethodRefValue'): + conditions.append( + f"PaymentMethodRef = '{kwargs['PaymentMethodRefValue']}'") + if kwargs.get('DepositToAccountRefValue'): + conditions.append( + f"DepositToAccountRef = '{kwargs['DepositToAccountRefValue']}'") + + # Build the complete query + base_query = "SELECT * FROM Payment" + + if conditions: + where_clause = " WHERE " + " AND ".join(conditions) + base_query += where_clause + + # Add pagination + start_position = kwargs.get('StartPosition', 1) + max_results = kwargs.get('MaxResults', 100) + + query = f"{base_query} STARTPOSITION {start_position} MAXRESULTS {max_results}" + + response = await self.client._get('query', params={'query': query}) + + # Handle case when no payments are returned + if 'Payment' not in response['QueryResponse']: + return [] + + payments = response['QueryResponse']['Payment'] + results = [payment_data_to_mcp_object(payment) for payment in payments] + + return results + + async def update_payment(self, **kwargs) -> Dict[str, Any]: + """Update an existing payment with comprehensive property support.""" + Id = kwargs.get('Id') + if not Id: + raise ValueError("Id is required for updating a payment") + + # Auto-fetch current sync token + current_payment_response = await self.client._get(f"payment/{Id}") + sync_token = current_payment_response.get( + 'Payment', {}).get('SyncToken', '0') + + payment_data = mcp_object_to_payment_data(**kwargs) + payment_data.update({ + "Id": Id, + "SyncToken": sync_token, + "sparse": True, + }) + + response = await self.client._post('payment', payment_data) + return payment_data_to_mcp_object(response['Payment']) + + async def delete_payment(self, Id: str) -> Dict[str, Any]: + """Delete a payment.""" + # Auto-fetch current sync token + current_payment_response = await self.client._get(f"payment/{Id}") + current_payment = current_payment_response.get('Payment', {}) + + if not current_payment: + raise ValueError(f"Payment with ID {Id} not found") + + sync_token = current_payment.get('SyncToken', '0') + + # For delete operation, wrap in Payment object + delete_data = { + "Id": Id, + "SyncToken": sync_token, + } + return await self.client._post("payment", delete_data, params={'operation': 'delete'}) + + async def send_payment(self, Id: str, SendTo: str) -> Dict[str, Any]: + """ + Send a payment receipt via email. + + Args: + Id: The QuickBooks payment ID to send + SendTo: Email address to send the payment receipt to + + Returns: + The payment response body. + """ + # Construct the endpoint URL + endpoint = f"payment/{Id}/send" + + # Build query parameters + params = {'sendTo': SendTo} + + # Send request with POST method (empty body as per API spec) + response = await self.client._make_request('POST', endpoint, params=params) + + # The response should contain the updated payment data + if 'Payment' in response: + return payment_data_to_mcp_object(response['Payment']) + + return response + + async def void_payment(self, Id: str) -> Dict[str, Any]: + """ + Void an existing payment in QuickBooks. + + The transaction remains active but all amounts and quantities are zeroed + and the string "Voided" is injected into Payment.PrivateNote, prepended + to existing text if present. If funds for the payment have been deposited, + you must delete the associated deposit object before voiding the payment object. + + Args: + Id: The QuickBooks payment ID to void + + Returns: + The payment response body with voided status. + """ + # Auto-fetch current sync token + current_payment_response = await self.client._get(f"payment/{Id}") + current_payment = current_payment_response.get('Payment', {}) + + if not current_payment: + raise ValueError(f"Payment with ID {Id} not found") + + sync_token = current_payment.get('SyncToken', '0') + + # For void operation, wrap in Payment object + void_data = { + "Id": Id, + "SyncToken": sync_token, + "sparse": True, + } + + response = await self.client._post("payment", void_data, params={'operation': 'void'}) + + # The response should contain the voided payment data + if 'Payment' in response: + return payment_data_to_mcp_object(response['Payment']) + + return response + + +# Export tools +tools = [create_payment_tool, get_payment_tool, list_payments_tool, search_payments_tool, + update_payment_tool, delete_payment_tool, send_payment_tool, void_payment_tool] diff --git a/mcp_servers/quickbooks/tools/vendors.py b/mcp_servers/quickbooks/tools/vendors.py new file mode 100644 index 00000000..b194e80d --- /dev/null +++ b/mcp_servers/quickbooks/tools/vendors.py @@ -0,0 +1,595 @@ +from typing import Any, Dict, List + +from mcp.types import Tool +import mcp.types as types +from .http_client import QuickBooksHTTPClient + +# Minimal properties for vendor creation (required by QuickBooks) +# Either DisplayName or at least one of GivenName, MiddleName, FamilyName is required +vendor_properties_minimal = { + "DisplayName": { + "type": "string", + "description": "The name of the vendor as displayed. Must be unique across all Vendor, Customer, and Employee objects. Cannot be removed with sparse update. If not supplied, the system generates DisplayName by concatenating vendor name components supplied in the request from the following list: GivenName, MiddleName, FamilyName." + }, + "GivenName": { + "type": "string", + "description": "Given name or first name of a person. The DisplayName attribute or at least one of GivenName, MiddleName, FamilyName attributes is required for object create." + }, + "MiddleName": { + "type": "string", + "description": "Middle name of the person. The person can have zero or more middle names. The DisplayName attribute or at least one of GivenName, MiddleName, FamilyName attributes is required for object create." + }, + "FamilyName": { + "type": "string", + "description": "Family name or the last name of the person. The DisplayName attribute or at least one of GivenName, MiddleName, FamilyName attributes is required for object create." + } +} + +# Vendor properties mapping (based on QuickBooks API documentation) +vendor_properties_user_define = { + **vendor_properties_minimal, + "PrimaryEmailAddr": { + "type": "string", + "description": "Primary email address." + }, + "CompanyName": { + "type": "string", + "description": "The name of the company associated with the person or organization." + }, + "PrintOnCheckName": { + "type": "string", + "description": "Name of the person or organization as printed on a check. If not provided, this is populated from DisplayName. Cannot be removed with sparse update." + }, + "PrimaryPhone": { + "type": "string", + "description": "Primary phone number." + }, + "WebAddr": { + "type": "string", + "description": "Website address." + }, + "BusinessNumber": { + "type": "string", + "description": "Also called, PAN (in India) is a code that acts as an identification for individuals, families and corporates, especially for those who pay taxes on their income." + }, + "CurrencyRefValue": { + "type": "string", + "description": "A three letter string representing the ISO 4217 code for the currency. For example, USD, AUD, EUR, and so on." + }, + "CurrencyRefName": { + "type": "string", + "description": "The full name of the currency." + }, + "Vendor1099": { + "type": "boolean", + "description": "This vendor is an independent contractor; someone who is given a 1099-MISC form at the end of the year. A 1099 vendor is paid with regular checks, and taxes are not withheld on their behalf." + }, + "CostRate": { + "type": "number", + "description": "Pay rate of the vendor" + }, + "BillRate": { + "type": "number", + "description": "BillRate can be set to specify this vendor's hourly billing rate." + }, + "TaxIdentifier": { + "type": "string", + "description": "The tax ID of the Person or Organization. The value is masked in responses, exposing only last four characters. For example, the ID of 123-45-6789 is returned as XXXXXXX6789." + }, + "AcctNum": { + "type": "string", + "description": "Name or number of the account associated with this vendor." + }, + "GSTRegistrationType": { + "type": "string", + "description": "For the filing of GSTR, transactions need to be classified depending on the type of vendor from whom the purchase is made. Possible values are: GST_REG_REG, GST_REG_COMP, GST_UNREG, CONSUMER, OVERSEAS, SEZ, DEEMED." + }, + # Billing Address fields + "BillAddrLine1": { + "type": "string", + "description": "First line of the billing address." + }, + "BillAddrLine2": { + "type": "string", + "description": "Second line of the billing address." + }, + "BillAddrCity": { + "type": "string", + "description": "City name for the billing address." + }, + "BillAddrCountry": { + "type": "string", + "description": "Country name for the billing address. For international addresses - countries should be passed as 3 ISO alpha-3 characters or the full name of the country." + }, + "BillAddrCountrySubDivisionCode": { + "type": "string", + "description": "Region within a country for the billing address. For example, state name for USA, province name for Canada." + }, + "BillAddrPostalCode": { + "type": "string", + "description": "Postal code for the billing address." + }, + # Vendor Payment Bank Detail fields + "VendorPaymentBankDetailBankAccountName": { + "type": "string", + "description": "Name on the Bank Account" + }, + "VendorPaymentBankDetailBankBranchIdentifier": { + "type": "string", + "description": "Bank identification number used to identify the Bank Branch. 6 digit value in format xxx-xxx." + }, + "VendorPaymentBankDetailBankAccountNumber": { + "type": "string", + "description": "Vendor's Bank Account number. In response the value is masked and last four digit is only returned" + }, + "VendorPaymentBankDetailStatementText": { + "type": "string", + "description": "Text/note/comment for Remittance" + } +} + +vendor_properties = { + **vendor_properties_user_define, + "Id": { + "type": "string", + "description": "The unique QuickBooks vendor ID" + } +} + +# MCP Tool definitions +create_vendor_tool = Tool( + name="quickbooks_create_vendor", + title="Create Vendor", + description="Create a new vendor in QuickBooks. Either DisplayName or at least one of GivenName, MiddleName, FamilyName is required.", + inputSchema={ + "type": "object", + "properties": vendor_properties_minimal, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_VENDOR"}) +) + +get_vendor_tool = Tool( + name="quickbooks_get_vendor", + title="Get Vendor", + description="Get a specific vendor by ID from QuickBooks", + inputSchema={ + "type": "object", + "properties": { + "Id": {"type": "string", "description": "The QuickBooks vendor ID"} + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_VENDOR", "readOnlyHint": True}) +) + +list_vendors_tool = Tool( + name="quickbooks_list_vendors", + title="List Vendors", + description="List all vendors from QuickBooks", + inputSchema={ + "type": "object", + "properties": { + "ActiveOnly": {"type": "boolean", "description": "Return only active vendors", "default": True}, + "MaxResults": {"type": "integer", "description": "Maximum number of results to return", "default": 100}, + }, + "required": [], + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_VENDOR", "readOnlyHint": True}) +) + +search_vendors_tool = Tool( + name="quickbooks_search_vendors", + title="Search Vendors", + description="Search vendors with various filters including name, company, contact info, and status", + inputSchema={ + "type": "object", + "properties": { + "Name": {"type": "string", "description": "Search by vendor name (partial match)"}, + "DisplayName": {"type": "string", "description": "Search by vendor display name (partial match)"}, + "CompanyName": {"type": "string", "description": "Search by company name (partial match)"}, + "GivenName": {"type": "string", "description": "Search by given/first name (partial match)"}, + "FamilyName": {"type": "string", "description": "Search by family/last name (partial match)"}, + "PrimaryEmailAddr": {"type": "string", "description": "Search by email address (partial match)"}, + "PrimaryPhone": {"type": "string", "description": "Search by phone number (partial match)"}, + "Active": {"type": "boolean", "description": "Filter by active status"}, + "Vendor1099": {"type": "boolean", "description": "Filter by 1099 vendor status"}, + "GSTIN": {"type": "string", "description": "Search by GSTIN"}, + "BusinessNumber": {"type": "string", "description": "Search by business number"}, + "AcctNum": {"type": "string", "description": "Search by account number"}, + "GSTRegistrationType": {"type": "string", "description": "Filter by GST registration type"}, + "MaxResults": {"type": "integer", "description": "Maximum number of results to return", "default": 100}, + "StartPosition": {"type": "integer", "description": "Starting position for pagination (1-based)", "default": 1} + }, + "required": [], + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_VENDOR", "readOnlyHint": True}) +) + +update_vendor_tool = Tool( + name="quickbooks_update_vendor", + title="Update Vendor", + description="Update an existing vendor in QuickBooks. Use activate_vendor/deactivate_vendor for status changes.", + inputSchema={ + "type": "object", + "properties": { + key: value for key, value in vendor_properties.items() + if key != "Active" # Remove Active from update inputs + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_VENDOR"}) +) + +activate_vendor_tool = Tool( + name="quickbooks_activate_vendor", + title="Activate Vendor", + description="Activate a vendor in QuickBooks (set Active to true)", + inputSchema={ + "type": "object", + "properties": { + "Id": {"type": "string", "description": "The QuickBooks vendor ID to activate"} + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_VENDOR"}) +) + +deactivate_vendor_tool = Tool( + name="quickbooks_deactivate_vendor", + title="Deactivate Vendor", + description="Deactivate a vendor from QuickBooks (set Active to false)", + inputSchema={ + "type": "object", + "properties": { + "Id": {"type": "string", "description": "The QuickBooks vendor ID to deactivate"} + }, + "required": ["Id"] + }, + annotations=types.ToolAnnotations(**{"category": "QUICKBOOKS_VENDOR"}) +) + + +def mcp_object_to_vendor_data(**kwargs) -> Dict[str, Any]: + """ + Convert MCP object format to QuickBooks vendor data format. + This function transforms the flat MCP structure to the nested format expected by QuickBooks API. + """ + vendor_data = {} + + # Basic vendor information - direct copy + for field in ['DisplayName', 'GivenName', 'MiddleName', 'FamilyName', + 'CompanyName', 'PrintOnCheckName', 'BusinessNumber', 'Vendor1099', 'CostRate', + 'BillRate', 'TaxIdentifier', 'AcctNum', 'GSTRegistrationType']: + if field in kwargs: + vendor_data[field] = kwargs[field] + + # Handle Active field only for create operation (not for update) + if 'Active' in kwargs: + vendor_data['Active'] = kwargs['Active'] + + # Email address + if 'PrimaryEmailAddr' in kwargs: + vendor_data['PrimaryEmailAddr'] = { + 'Address': kwargs['PrimaryEmailAddr']} + + # Phone numbers + phone_fields = ['PrimaryPhone'] + for field in phone_fields: + if field in kwargs: + vendor_data[field] = {'FreeFormNumber': kwargs[field]} + + # Website address + if 'WebAddr' in kwargs: + vendor_data['WebAddr'] = {'URI': kwargs['WebAddr']} + + # Reference objects - convert separate value/name fields to structured objects + ref_mappings = [ + ('CurrencyRef', 'CurrencyRefValue', 'CurrencyRefName') + # Removed APAccountRef, TermRef from input + ] + + for ref_name, value_field, name_field in ref_mappings: + if value_field in kwargs: + ref_obj = {'value': kwargs[value_field]} + if name_field in kwargs: + ref_obj['name'] = kwargs[name_field] + vendor_data[ref_name] = ref_obj + + # Billing Address + bill_addr_fields = ['BillAddrLine1', 'BillAddrLine2', 'BillAddrCity', 'BillAddrCountry', 'BillAddrCountrySubDivisionCode', 'BillAddrPostalCode'] + + bill_addr = {} + for field in bill_addr_fields: + if field in kwargs: + # Remove the BillAddr prefix + addr_field = field.replace('BillAddr', '') + bill_addr[addr_field] = kwargs[field] + + if bill_addr: + vendor_data['BillAddr'] = bill_addr + + # Vendor Payment Bank Detail + bank_detail_fields = { + 'VendorPaymentBankDetailBankAccountName': 'BankAccountName', + 'VendorPaymentBankDetailBankBranchIdentifier': 'BankBranchIdentifier', + 'VendorPaymentBankDetailBankAccountNumber': 'BankAccountNumber', + 'VendorPaymentBankDetailStatementText': 'StatementText' + } + + bank_detail = {} + for mcp_field, qb_field in bank_detail_fields.items(): + if mcp_field in kwargs: + bank_detail[qb_field] = kwargs[mcp_field] + + if bank_detail: + vendor_data['VendorPaymentBankDetail'] = bank_detail + + return vendor_data + + +def vendor_data_to_mcp_object(vendor_data: Dict[str, Any]) -> Dict[str, Any]: + """ + Convert QuickBooks vendor data format to MCP object format. + This function flattens the nested QuickBooks structure to the flat format expected by MCP tools. + """ + mcp_object = {} + + # Copy basic fields if present + for field in [ + 'Id', 'DisplayName', 'GivenName', 'MiddleName', 'FamilyName', + 'CompanyName', 'PrintOnCheckName', 'Active', 'BusinessNumber', 'Vendor1099', 'CostRate', + 'BillRate', 'TaxIdentifier', 'AcctNum', 'GSTRegistrationType', + 'Balance' + ]: + if field in vendor_data: + mcp_object[field] = vendor_data[field] + + # Handle fields that are output-only (not in input schema but preserved in output) + for field in ['Title', 'Suffix', 'AlternatePhone', 'Mobile', 'Fax', 'Source', 'GSTIN', 'T4AEligible', 'HasTPAR', 'TaxReportingBasis', 'T5018Eligible']: + if field in vendor_data: + mcp_object[field] = vendor_data[field] + + # Email address + if 'PrimaryEmailAddr' in vendor_data and isinstance(vendor_data['PrimaryEmailAddr'], dict): + if 'Address' in vendor_data['PrimaryEmailAddr']: + mcp_object['PrimaryEmailAddr'] = vendor_data['PrimaryEmailAddr']['Address'] + + # Phone numbers - handle primary phone as input, others as output-only + if 'PrimaryPhone' in vendor_data and isinstance(vendor_data['PrimaryPhone'], dict): + if 'FreeFormNumber' in vendor_data['PrimaryPhone']: + mcp_object['PrimaryPhone'] = vendor_data['PrimaryPhone']['FreeFormNumber'] + + # Handle other phone fields as output-only + phone_mappings = ['AlternatePhone', 'Mobile', 'Fax'] + for field in phone_mappings: + if field in vendor_data and isinstance(vendor_data[field], dict): + if 'FreeFormNumber' in vendor_data[field]: + mcp_object[field] = vendor_data[field]['FreeFormNumber'] + + # Website address + if 'WebAddr' in vendor_data and isinstance(vendor_data['WebAddr'], dict): + if 'URI' in vendor_data['WebAddr']: + mcp_object['WebAddr'] = vendor_data['WebAddr']['URI'] + + # Reference objects - flatten to separate value and name fields + ref_mappings = [ + ('APAccountRef', 'APAccountRefValue', 'APAccountRefName'), + ('TermRef', 'TermRefValue', 'TermRefName'), + ('CurrencyRef', 'CurrencyRefValue', 'CurrencyRefName'), + ('SyncToken', 'SyncToken', None) + ] + + for ref_name, value_field, name_field in ref_mappings: + if ref_name in vendor_data: + ref = vendor_data[ref_name] + if isinstance(ref, dict): + if 'value' in ref: + mcp_object[value_field] = ref['value'] + if name_field and 'name' in ref: + mcp_object[name_field] = ref['name'] + else: + # Handle cases where SyncToken might be directly in vendor_data + mcp_object[value_field] = ref + + # Billing Address + if 'BillAddr' in vendor_data and isinstance(vendor_data['BillAddr'], dict): + bill_addr = vendor_data['BillAddr'] + addr_mappings = { + 'Line1': 'BillAddrLine1', + 'Line2': 'BillAddrLine2', + 'City': 'BillAddrCity', + 'Country': 'BillAddrCountry', + 'CountrySubDivisionCode': 'BillAddrCountrySubDivisionCode', + 'PostalCode': 'BillAddrPostalCode' + } + + for qb_field, mcp_field in addr_mappings.items(): + if qb_field in bill_addr: + mcp_object[mcp_field] = bill_addr[qb_field] + + # Vendor Payment Bank Detail + if 'VendorPaymentBankDetail' in vendor_data and isinstance(vendor_data['VendorPaymentBankDetail'], dict): + bank_detail = vendor_data['VendorPaymentBankDetail'] + bank_mappings = { + 'BankAccountName': 'VendorPaymentBankDetailBankAccountName', + 'BankBranchIdentifier': 'VendorPaymentBankDetailBankBranchIdentifier', + 'BankAccountNumber': 'VendorPaymentBankDetailBankAccountNumber', + 'StatementText': 'VendorPaymentBankDetailStatementText' + } + + for qb_field, mcp_field in bank_mappings.items(): + if qb_field in bank_detail: + mcp_object[mcp_field] = bank_detail[qb_field] + + # MetaData fields + if 'MetaData' in vendor_data and isinstance(vendor_data['MetaData'], dict): + metadata = vendor_data['MetaData'] + if 'CreateTime' in metadata: + mcp_object['MetaDataCreateTime'] = metadata['CreateTime'] + if 'LastUpdatedTime' in metadata: + mcp_object['MetaDataLastUpdatedTime'] = metadata['LastUpdatedTime'] + + return mcp_object + + +class VendorManager: + def __init__(self, client: QuickBooksHTTPClient): + self.client = client + + async def create_vendor(self, **kwargs) -> Dict[str, Any]: + """Create a new vendor with comprehensive property support.""" + vendor_data = mcp_object_to_vendor_data(**kwargs) + + response = await self.client._post('vendor', vendor_data) + return vendor_data_to_mcp_object(response['Vendor']) + + async def get_vendor(self, Id: str) -> Dict[str, Any]: + """Get a specific vendor by ID.""" + response = await self.client._get(f"vendor/{Id}") + return vendor_data_to_mcp_object(response['Vendor']) + + async def list_vendors(self, ActiveOnly: bool = True, MaxResults: int = 100) -> List[Dict[str, Any]]: + """List all vendors with comprehensive properties and pagination support.""" + query = f"select * from Vendor" + + if ActiveOnly: + query += " WHERE Active = true" + + query += f" MAXRESULTS {MaxResults}" + + response = await self.client._get('query', params={'query': query}) + + # Handle case when no vendors are returned + if 'Vendor' not in response['QueryResponse']: + return [] + + vendors = response['QueryResponse']['Vendor'] + return [vendor_data_to_mcp_object(vendor) for vendor in vendors] + + async def search_vendors(self, **kwargs) -> List[Dict[str, Any]]: + """ + Search vendors with various filters and pagination support. + + Args: + Name: Search by vendor name (partial match) + DisplayName: Search by vendor display name (partial match) + CompanyName: Search by company name (partial match) + GivenName: Search by given/first name (partial match) + FamilyName: Search by family/last name (partial match) + PrimaryEmailAddr: Search by email address (partial match) + PrimaryPhone: Search by phone number (partial match) + Active: Filter by active status + Vendor1099: Filter by 1099 vendor status + GSTIN: Search by GSTIN + BusinessNumber: Search by business number + AcctNum: Search by account number + GSTRegistrationType: Filter by GST registration type + MaxResults: Maximum number of results to return (default: 100) + StartPosition: Starting position for pagination (default: 1) + + Returns: + List of vendors matching the search criteria + """ + # Build WHERE clause conditions + conditions = [] + + # Name-based filters (partial match) + name_filters = { + 'Name': 'Name', + 'DisplayName': 'DisplayName', + 'CompanyName': 'CompanyName', + 'GivenName': 'GivenName', + 'FamilyName': 'FamilyName' + } + + for param, field in name_filters.items(): + if kwargs.get(param): + value = kwargs[param].replace( + "'", "''") # Escape single quotes + conditions.append(f"{field} LIKE '%{value}%'") + + # Exact match filters + exact_filters = { + 'Active': 'Active', + 'Vendor1099': 'Vendor1099', + 'GSTIN': 'GSTIN', + 'BusinessNumber': 'BusinessNumber', + 'AcctNum': 'AcctNum', + 'GSTRegistrationType': 'GSTRegistrationType' + } + + for param, field in exact_filters.items(): + if kwargs.get(param) is not None: + value = kwargs[param] + if isinstance(value, bool): + conditions.append(f"{field} = {str(value).lower()}") + else: + conditions.append(f"{field} = '{value}'") + + # Contact info filters (partial match) + if kwargs.get('PrimaryEmailAddr'): + email = kwargs['PrimaryEmailAddr'].replace("'", "''") + conditions.append(f"PrimaryEmailAddr LIKE '%{email}%'") + + if kwargs.get('PrimaryPhone'): + phone = kwargs['PrimaryPhone'].replace("'", "''") + conditions.append(f"PrimaryPhone LIKE '%{phone}%'") + + # Build the complete query + base_query = "SELECT * FROM Vendor" + + if conditions: + where_clause = " WHERE " + " AND ".join(conditions) + base_query += where_clause + + # Add pagination + start_position = kwargs.get('StartPosition', 1) + max_results = kwargs.get('MaxResults', 100) + + query = f"{base_query} STARTPOSITION {start_position} MAXRESULTS {max_results}" + + response = await self.client._get('query', params={'query': query}) + + # Handle case when no vendors are returned + if 'Vendor' not in response['QueryResponse']: + return [] + + vendors = response['QueryResponse']['Vendor'] + results = [vendor_data_to_mcp_object(vendor) for vendor in vendors] + + return results + + async def update_vendor(self, **kwargs) -> Dict[str, Any]: + """Update an existing vendor with comprehensive property support.""" + Id = kwargs.get('Id') + if not Id: + raise ValueError("Id is required for updating a vendor") + + # Auto-fetch current sync token + current_vendor_response = await self.client._get(f"vendor/{Id}") + sync_token = current_vendor_response.get( + 'Vendor', {}).get('SyncToken', '0') + + vendor_data = mcp_object_to_vendor_data(**kwargs) + vendor_data.update({ + "Id": Id, + "SyncToken": sync_token, + "sparse": True, + }) + + response = await self.client._post('vendor', vendor_data) + return vendor_data_to_mcp_object(response['Vendor']) + + async def activate_vendor(self, Id: str) -> Dict[str, Any]: + """Activate a vendor (set Active to true).""" + return await self.update_vendor(Id=Id, Active=True) + + async def deactivate_vendor(self, Id: str) -> Dict[str, Any]: + """Deactivate a vendor (set Active to false).""" + return await self.update_vendor(Id=Id, Active=False) + + +# Export tools +tools = [create_vendor_tool, get_vendor_tool, list_vendors_tool, search_vendors_tool, + update_vendor_tool, activate_vendor_tool, deactivate_vendor_tool] diff --git a/mcp_servers/reddit_search/.env.example b/mcp_servers/reddit_search/.env.example new file mode 100644 index 00000000..c48db424 Binary files /dev/null and b/mcp_servers/reddit_search/.env.example differ diff --git a/mcp_servers/reddit_search/Dockerfile b/mcp_servers/reddit_search/Dockerfile new file mode 100644 index 00000000..6b054355 --- /dev/null +++ b/mcp_servers/reddit_search/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +COPY mcp_servers/reddit_search/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/reddit_search/server.py . +COPY mcp_servers/reddit_search/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5001 + +# Command to run the server +CMD ["python", "server.py"] diff --git a/mcp_servers/reddit_search/README.md b/mcp_servers/reddit_search/README.md new file mode 100644 index 00000000..e890485b --- /dev/null +++ b/mcp_servers/reddit_search/README.md @@ -0,0 +1,119 @@ +# Reddit Search MCP Server + +A Model Context Protocol (MCP) server that enables AI clients to search, retrieve, and interact with Reddit content. Provides semantic search, post creation, comment management, and community discovery capabilities. + +## Features + +- **Semantic Search**: Find relevant subreddits and posts using natural language queries +- **Content Creation**: Create posts and comments programmatically +- **Community Discovery**: Discover relevant subreddits based on topics +- **Rate Limiting**: Built-in rate limiting and retry logic for Reddit API +- **Dual Transport**: Supports both SSE and StreamableHTTP protocols + +## Tools Reference + +| Tool | Description | Input Parameters | Output | Credentials Required | +|------|-------------|------------------|--------|---------------------| +| `reddit_find_subreddits` | Find relevant subreddits based on a query | `query` (string) | Array of subreddit objects with name, description, subscriber_count | Basic API credentials | +| `reddit_search_posts` | Search for posts in a specific subreddit | `subreddit` (string), `query` (string) | Array of post objects with title, score, url, comment_count | Basic API credentials | +| `reddit_get_post_comments` | Get top comments for a specific post | `post_id` (string), `subreddit` (string) | Array of comment objects with author, body, score | Basic API credentials | +| `reddit_find_similar_posts` | Find posts similar to a given post | `post_id` (string), `limit` (number, optional) | Array of similar post objects | Basic API credentials | +| `reddit_create_post` | Create a new text post in a subreddit | `subreddit` (string), `title` (string), `text` (string) | Post creation confirmation with post_id | Basic API credentials + Username/Password | +| `reddit_create_comment` | Create a comment on a post | `post_id` (string), `text` (string) | Comment creation confirmation with comment_id | Basic API credentials + Username/Password | +| `reddit_upvote` | Upvote a post | `post_id` (string) | Upvote confirmation | Basic API credentials + Username/Password | +| `reddit_get_user_posts` | Get recent posts by authenticated user | `limit` (number, optional) | Array of user's post objects | Basic API credentials + Username/Password | + +## Prerequisites + +### Reddit API Credentials + +1. Visit https://www.reddit.com/prefs/apps +2. Click "Create App" or "Create Another App" +3. Choose "script" application type +4. Save the generated `client_id` and `client_secret` + +**For Post Creation/Interaction**: You'll also need your Reddit username and password. + +## Setup + +### 1. Environment Configuration + +Create `.env` file in `mcp_servers/reddit_search/`: + +```bash +REDDIT_MCP_SERVER_PORT=5001 +REDDIT_CLIENT_ID=your_client_id_here +REDDIT_CLIENT_SECRET=your_client_secret_here +REDDIT_USER_AGENT=klavis-mcp/0.1 (+https://klavis.ai) + +# For post creation (optional - only needed for reddit_create_post, reddit_create_comment, reddit_upvote) +REDDIT_USERNAME=your_reddit_username +REDDIT_PASSWORD=your_reddit_password +``` + +### 2. Running the Server + +#### Docker (Recommended) +```bash +# From repository root +docker build -t reddit-mcp-server -f mcp_servers/reddit_search/Dockerfile . +docker run -p 5001:5001 --env-file mcp_servers/reddit_search/.env reddit-mcp-server +``` + +#### Direct Python +```bash +cd mcp_servers/reddit_search +python server.py --port 5001 +``` + +## Cursor IDE Integration + +### 1. Configure MCP Server in Cursor + +Add to `~/.cursor/mcp.json`: +```json +{ + "mcpServers": { + "reddit-search": { + "url": "http://localhost:5001/sse" + } + } +} +``` + +### 2. Test in Cursor Chat + +Try these example queries: + +- "Use reddit_find_subreddits to find subreddits related to machine learning" +- "Use reddit_search_posts in subreddit 'programming' for query 'Python vs JavaScript'" +- "Use reddit_get_post_comments for post_id '1nqa311' in subreddit 'programming'" +- "Use reddit_find_similar_posts for post_id '1nqa311' with limit 5" +- "Use reddit_create_post in subreddit 'test' with title 'Test Post' and text 'This is a test'" + +## API Endpoints + +- **SSE**: `GET /sse` - Server-Sent Events endpoint for MCP communication +- **StreamableHTTP**: `POST /mcp` - StreamableHTTP endpoint for MCP communication + +## Authentication + +The server uses Reddit's OAuth2 client credentials flow. Access tokens are automatically obtained and cached for the duration of the server session. + +## Rate Limiting + +Built-in rate limiting and retry logic: +- Respects Reddit's rate limits (429 responses) +- Implements exponential backoff for failed requests +- Includes jitter to prevent thundering herd effects + +## Dependencies + +- mcp>=1.12.0 +- pydantic +- typing-extensions +- httpx +- click +- python-dotenv +- starlette +- uvicorn[standard] diff --git a/mcp_servers/reddit_search/requirements.txt b/mcp_servers/reddit_search/requirements.txt new file mode 100644 index 00000000..1a395aae --- /dev/null +++ b/mcp_servers/reddit_search/requirements.txt @@ -0,0 +1,9 @@ +mcp>=1.12.0 +pydantic +typing-extensions +httpx +click +python-dotenv +starlette +uvicorn[standard] +# praw \ No newline at end of file diff --git a/mcp_servers/reddit_search/server.py b/mcp_servers/reddit_search/server.py new file mode 100644 index 00000000..006f8064 --- /dev/null +++ b/mcp_servers/reddit_search/server.py @@ -0,0 +1,320 @@ +import contextlib +import json +import logging +import os +from collections.abc import AsyncIterator +from typing import List, Callable, Awaitable, Any + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + find_relevant_subreddits as find_subreddits_impl, + search_subreddit_posts as search_posts_impl, + get_post_and_top_comments as get_comments_impl, + find_similar_posts_reddit as find_similar_impl, + create_post as create_post_impl, + get_user_posts as get_user_posts_impl, + create_comment as create_comment_impl, + upvote as upvote_impl, +) + +from tools.base import init_http_clients, close_http_clients + +load_dotenv() + +# Configure logging +logger = logging.getLogger(__name__) + +REDDIT_MCP_SERVER_PORT = int(os.getenv("REDDIT_MCP_SERVER_PORT", "5001")) + +@click.command() +@click.option("--port", default=REDDIT_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("reddit-mcp-server") + + tool_implementations: dict[str, Callable[..., Awaitable[Any]]] = { + "reddit_find_subreddits": find_subreddits_impl, + "reddit_search_posts": search_posts_impl, + "reddit_get_post_comments": get_comments_impl, + "reddit_find_similar_posts": find_similar_impl, + "reddit_create_post": create_post_impl, + "reddit_get_user_posts": get_user_posts_impl, + "reddit_create_comment": create_comment_impl, + "reddit_upvote": upvote_impl, + } + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="reddit_find_subreddits", + description="Find relevant subreddits based on a query. Use this first to discover communities when a user does not specify one.", + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Search query to find relevant subreddits" + } + }, + "required": ["query"] + }, + annotations=types.ToolAnnotations(**{"category": "REDDIT_SEARCH", "readOnlyHint": True}) + ), + types.Tool( + name="reddit_search_posts", + description="Search for posts in a specific subreddit matching a query.", + inputSchema={ + "type": "object", + "properties": { + "subreddit": { + "type": "string", + "description": "Name of the subreddit to search in (without r/ prefix)" + }, + "query": { + "type": "string", + "description": "Search query to find relevant posts" + } + }, + "required": ["subreddit", "query"] + }, + annotations=types.ToolAnnotations(**{"category": "REDDIT_SEARCH", "readOnlyHint": True}) + ), + types.Tool( + name="reddit_get_post_comments", + description="Get the top comments for a specific Reddit post.", + inputSchema={ + "type": "object", + "properties": { + "post_id": { + "type": "string", + "description": "Reddit post ID (without t3_ prefix)" + }, + "subreddit": { + "type": "string", + "description": "Name of the subreddit containing the post" + } + }, + "required": ["post_id", "subreddit"] + }, + annotations=types.ToolAnnotations(**{"category": "REDDIT_POST", "readOnlyHint": True}) + ), + types.Tool( + name="reddit_find_similar_posts", + description="Find posts similar to a given Reddit post using semantic matching.", + inputSchema={ + "type": "object", + "properties": { + "post_id": { + "type": "string", + "description": "Reddit post ID to find similar posts for" + }, + "limit": { + "type": "integer", + "description": "Maximum number of similar posts to return (default: 10, max: 50)", + "default": 10 + } + }, + "required": ["post_id"] + }, + annotations=types.ToolAnnotations(**{"category": "REDDIT_SEARCH", "readOnlyHint": True}) + ), + types.Tool( + name="reddit_create_post", + description="Create a new post in a subreddit.", + inputSchema={ + "type": "object", + "properties": { + "subreddit": { + "type": "string", + "description": "Name of the subreddit to create the post in (without r/ prefix)" + }, + "title": { + "type": "string", + "description": "Title of the post" + }, + "text": { + "type": "string", + "description": "Text of the post" + } + }, + "required": ["subreddit", "title", "text"] + }, + annotations=types.ToolAnnotations(**{"category": "REDDIT_POST"}) + ), + types.Tool( + name="reddit_get_user_posts", + description="Fetches the most recent posts submitted by the authenticated user.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of posts to return (default: 25, max: 100)", + "default": 25 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "REDDIT_USER", "readOnlyHint": True}) + ), + types.Tool( + name="reddit_create_comment", + description="Creates a new comment on a given Reddit post.", + inputSchema={ + "type": "object", + "properties": { + "post_id": { + "type": "string", + "description": "The ID of the post to comment on (without the 't3_' prefix)." + }, + "text": { + "type": "string", + "description": "The Markdown content of the comment." + } + }, + "required": ["post_id", "text"] + }, + annotations=types.ToolAnnotations(**{"category": "REDDIT_COMMENT"}) + ), + types.Tool( + name="reddit_upvote", + description="Upvotes a post.", + inputSchema={ + "type": "object", + "properties": { + "post_id": { + "type": "string", + "description": "The ID of the post to upvote (without the 't3_' prefix)." + } + }, + "required": ["post_id"] + }, + annotations=types.ToolAnnotations(**{"category": "REDDIT_ACTION"}) + ) + ] + + @app.call_tool() + async def call_tool( + name: str, + arguments: dict + ) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + tool_func = tool_implementations.get(name) + if not tool_func: + return [types.TextContent(type="text", text=f"Error: Unknown tool '{name}'")] + + try: + # Log the tool call with its arguments + arg_str = ", ".join(f"{k}={v!r}" for k, v in arguments.items()) + logger.info(f"Tool call: {name}({arg_str})") + + # Special handling for limit parameters + if name == "reddit_find_similar_posts": + arguments["limit"] = max(1, min(50, arguments.get("limit", 10))) + elif name == "reddit_get_user_posts": + arguments["limit"] = max(1, min(100, arguments.get("limit", 25))) + + result = await tool_func(**arguments) + + if name == "reddit_create_subreddit" and "troubleshooting" in result: + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + except Exception as e: + logger.exception(f"Error executing {name}: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + + # Set up SSE transport + sse = SseServerTransport("/messages/") + async def handle_sse(request): + logger.info("Handling SSE connection") + async with sse.connect_sse(request.scope, request.receive, request._send) as streams: + await app.run(streams[0], streams[1], app.create_initialization_options()) + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + await session_manager.handle_request(scope, receive, send) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager and HTTP clients.""" + await init_http_clients() + async with session_manager.run(): + logger.info("Reddit MCP Server started with dual transports!") + try: + yield + finally: + logger.info("Reddit MCP Server shutting down...") + await close_http_clients() + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + + +if __name__ == "__main__": + main() diff --git a/mcp_servers/reddit_search/tools/__init__.py b/mcp_servers/reddit_search/tools/__init__.py new file mode 100644 index 00000000..a6e03515 --- /dev/null +++ b/mcp_servers/reddit_search/tools/__init__.py @@ -0,0 +1,24 @@ +# Module exports and imports +from .find_subreddits import find_relevant_subreddits, SubredditInfo +from .search_posts import search_subreddit_posts, PostInfo +from .get_comments import get_post_and_top_comments, PostDetails, CommentInfo +from .find_similar import find_similar_posts_reddit +from .create_post import create_post +from .get_user_posts import get_user_posts +from .create_comment import create_comment +from .upvote import upvote + +__all__ = [ + "find_relevant_subreddits", + "search_subreddit_posts", + "get_post_and_top_comments", + "find_similar_posts_reddit", + "create_post", + "get_user_posts", + "create_comment", + "upvote", + "SubredditInfo", + "PostInfo", + "CommentInfo", + "PostDetails" +] diff --git a/mcp_servers/reddit_search/tools/base.py b/mcp_servers/reddit_search/tools/base.py new file mode 100644 index 00000000..b69f48f5 --- /dev/null +++ b/mcp_servers/reddit_search/tools/base.py @@ -0,0 +1,366 @@ +import os +import logging +import httpx +from typing import Dict, List, TypedDict +import time +import random +import re +import asyncio + +from dotenv import load_dotenv + +load_dotenv() + +logger = logging.getLogger(__name__) + +# load the reddit api key +REDDIT_CLIENT_ID = os.getenv("REDDIT_CLIENT_ID") +REDDIT_CLIENT_SECRET = os.getenv("REDDIT_CLIENT_SECRET") +REDDIT_USERNAME = os.getenv("REDDIT_USERNAME") +REDDIT_PASSWORD = os.getenv("REDDIT_PASSWORD") + + +# base api urls +REDDIT_API_BASE = "https://oauth.reddit.com/" +REDDIT_TOKEN_URL = "https://www.reddit.com/api/v1/access_token" +REDDIT_USER_AGENT = os.getenv("REDDIT_USER_AGENT", "klavis-mcp/0.1 (+https://klavis.ai)") + +# cached access token and client +_access_token: str | None = None +_token_expires_at: float | None = None +_user_access_token: str | None = None +_user_token_expires_at: float | None = None +_async_client: httpx.AsyncClient | None = None +_token_lock: asyncio.Lock = asyncio.Lock() +_user_token_lock: asyncio.Lock = asyncio.Lock() + + +# simple per-process concurrency limiter (env-configurable) +_max_concurrency = int(os.getenv("REDDIT_MAX_CONCURRENCY", "10")) +_request_semaphore = asyncio.Semaphore(_max_concurrency) + + +async def _ensure_async_client() -> httpx.AsyncClient: + global _async_client + if _async_client is None: + _async_client = httpx.AsyncClient(timeout=httpx.Timeout(20.0, connect=10.0)) + return _async_client + + +async def _refresh_token_locked() -> None: + """Refresh OAuth token under lock. Caller must hold _token_lock.""" + global _access_token, _token_expires_at + + if not REDDIT_CLIENT_ID or not REDDIT_CLIENT_SECRET: + raise ValueError("REDDIT_CLIENT_ID and REDDIT_CLIENT_SECRET must be set") + + logger.info("Requesting new Reddit API access token…") + client = await _ensure_async_client() + auth = (REDDIT_CLIENT_ID, REDDIT_CLIENT_SECRET) + data = { + "grant_type": "client_credentials", + "scope": "identity submit read" # Required scopes for posting and reading + } + headers = {"User-Agent": REDDIT_USER_AGENT} + resp = await client.post(REDDIT_TOKEN_URL, auth=auth, data=data, headers=headers) + resp.raise_for_status() + token_data = resp.json() + _access_token = token_data.get("access_token") + expires_in = float(token_data.get("expires_in", 3600)) + # refresh a bit early to avoid expiry races + _token_expires_at = time.time() + max(60.0, expires_in - 120.0) + logger.info("Obtained new Reddit API access token.") + + +async def _get_reddit_auth_header() -> dict[str, str]: + """Return Authorization header, refreshing the token if needed.""" + global _access_token, _token_expires_at + + # Fast path: token exists and not near expiry + if _access_token and _token_expires_at and time.time() < _token_expires_at: + return {"Authorization": f"Bearer {_access_token}", "User-Agent": REDDIT_USER_AGENT} + + # Acquire lock to refresh once across awaiters + async with _token_lock: + if _access_token and _token_expires_at and time.time() < _token_expires_at: + + return {"Authorization": f"Bearer {_access_token}", "User-Agent": REDDIT_USER_AGENT} + await _refresh_token_locked() + return {"Authorization": f"Bearer {_access_token}", "User-Agent": REDDIT_USER_AGENT} + + +async def _refresh_user_token_locked() -> None: + """Refresh user OAuth token under lock using password grant. Caller must hold _user_token_lock.""" + global _user_access_token, _user_token_expires_at + + if not all([REDDIT_CLIENT_ID, REDDIT_CLIENT_SECRET, REDDIT_USERNAME, REDDIT_PASSWORD]): + raise ValueError("REDDIT_CLIENT_ID, REDDIT_CLIENT_SECRET, REDDIT_USERNAME, and REDDIT_PASSWORD must be set for user actions.") + + logger.info("Requesting new user-specific Reddit API access token…") + client = await _ensure_async_client() + auth = (REDDIT_CLIENT_ID, REDDIT_CLIENT_SECRET) + data = { + "grant_type": "password", + "username": REDDIT_USERNAME, + "password": REDDIT_PASSWORD, + "scope": "identity submit read vote" + } + headers = {"User-Agent": REDDIT_USER_AGENT} + resp = await client.post(REDDIT_TOKEN_URL, auth=auth, data=data, headers=headers) + resp.raise_for_status() + token_data = resp.json() + _user_access_token = token_data.get("access_token") + expires_in = float(token_data.get("expires_in", 3600)) + _user_token_expires_at = time.time() + max(60.0, expires_in - 120.0) + logger.info("Obtained new user-specific Reddit API access token.") + + +async def _get_reddit_user_auth_header() -> dict[str, str]: + """Return user Authorization header, refreshing the token if needed.""" + global _user_access_token, _user_token_expires_at + + if _user_access_token and _user_token_expires_at and time.time() < _user_token_expires_at: + return {"Authorization": f"Bearer {_user_access_token}", "User-Agent": REDDIT_USER_AGENT} + + async with _user_token_lock: + if _user_access_token and _user_token_expires_at and time.time() < _user_token_expires_at: + return {"Authorization": f"Bearer {_user_access_token}", "User-Agent": REDDIT_USER_AGENT} + await _refresh_user_token_locked() + return {"Authorization": f"Bearer {_user_access_token}", "User-Agent": REDDIT_USER_AGENT} + + +async def reddit_get(path: str, params: Dict | None = None, max_retries: int = 3) -> Dict: + """HTTP GET helper with UA header and async retry/backoff including 429. + + path should start with '/'. + """ + client = await _ensure_async_client() + headers = await _get_reddit_auth_header() + params = params.copy() if params else {} + params.setdefault("raw_json", 1) + + backoff_seconds = 1.0 + last_exc: Exception | None = None + + for attempt in range(max_retries): + async with _request_semaphore: + try: + resp = await client.get(f"{REDDIT_API_BASE}{path}", headers=headers, params=params) + if resp.status_code == 401: + # Token likely expired/invalid; refresh once and retry + async with _token_lock: + await _refresh_token_locked() + headers = await _get_reddit_auth_header() + resp = await client.get(f"{REDDIT_API_BASE}{path}", headers=headers, params=params) + + if resp.status_code == 429: + retry_after = resp.headers.get("Retry-After") + sleep_s = float(retry_after) if retry_after else backoff_seconds + logger.warning(f"Reddit API 429 rate limit. Sleeping {sleep_s}s then retrying…") + await asyncio.sleep(sleep_s + random.uniform(0, 0.25)) + backoff_seconds = min(backoff_seconds * 2, 8) + continue + + resp.raise_for_status() + return resp.json() + except (httpx.RequestError, httpx.HTTPStatusError) as exc: + last_exc = exc + logger.warning(f"Reddit GET {path} failed (attempt {attempt+1}/{max_retries}): {exc}") + await asyncio.sleep(backoff_seconds + random.uniform(0, 0.5)) + backoff_seconds = min(backoff_seconds * 2, 8) + + if last_exc: + raise last_exc + raise RuntimeError("Reddit GET failed unexpectedly with no exception recorded") + + +async def reddit_get_as_user(path: str, params: Dict | None = None, max_retries: int = 3) -> Dict: + """HTTP GET helper for user actions with retry/backoff logic.""" + client = await _ensure_async_client() + headers = await _get_reddit_user_auth_header() + params = params.copy() if params else {} + params.setdefault("raw_json", 1) + + backoff_seconds = 1.0 + last_exc: Exception | None = None + + for attempt in range(max_retries): + async with _request_semaphore: + try: + resp = await client.get(f"{REDDIT_API_BASE}{path}", headers=headers, params=params) + if resp.status_code == 401: + async with _user_token_lock: + await _refresh_user_token_locked() + headers = await _get_reddit_user_auth_header() + resp = await client.get(f"{REDDIT_API_BASE}{path}", headers=headers, params=params) + + if resp.status_code == 429: + retry_after = resp.headers.get("Retry-After") + sleep_s = float(retry_after) if retry_after else backoff_seconds + logger.warning(f"Reddit API 429 rate limit. Sleeping {sleep_s}s then retrying…") + await asyncio.sleep(sleep_s + random.uniform(0, 0.25)) + backoff_seconds = min(backoff_seconds * 2, 8) + continue + + resp.raise_for_status() + return resp.json() + except (httpx.RequestError, httpx.HTTPStatusError) as exc: + last_exc = exc + logger.warning(f"Reddit GET (as user) {path} failed (attempt {attempt+1}/{max_retries}): {exc}") + await asyncio.sleep(backoff_seconds + random.uniform(0, 0.5)) + backoff_seconds = min(backoff_seconds * 2, 8) + + if last_exc: + raise last_exc + raise RuntimeError("Reddit GET (as user) failed unexpectedly with no exception recorded") + + + +async def reddit_post_as_user(path: str, data: Dict, max_retries: int = 3) -> Dict: + """HTTP POST helper for user actions with retry/backoff logic.""" + client = await _ensure_async_client() + headers = await _get_reddit_user_auth_header() + + backoff_seconds = 1.0 + last_exc: Exception | None = None + + for attempt in range(max_retries): + async with _request_semaphore: + try: + resp = await client.post(f"{REDDIT_API_BASE}{path}", headers=headers, data=data) + + if resp.status_code == 401: + async with _user_token_lock: + await _refresh_user_token_locked() + headers = await _get_reddit_user_auth_header() + resp = await client.post(f"{REDDIT_API_BASE}{path}", headers=headers, data=data) + + if resp.status_code == 429: + retry_after = resp.headers.get("Retry-After") + sleep_s = float(retry_after) if retry_after else backoff_seconds + logger.warning(f"Reddit API 429 rate limit. Sleeping {sleep_s}s then retrying…") + await asyncio.sleep(sleep_s + random.uniform(0, 0.25)) + backoff_seconds = min(backoff_seconds * 2, 8) + continue + + resp.raise_for_status() + return resp.json() + + except (httpx.RequestError, httpx.HTTPStatusError) as exc: + last_exc = exc + logger.warning(f"Reddit POST {path} failed (attempt {attempt+1}/{max_retries}): {exc}") + await asyncio.sleep(backoff_seconds + random.uniform(0, 0.5)) + backoff_seconds = min(backoff_seconds * 2, 8) + + if last_exc: + raise last_exc + raise RuntimeError("Reddit POST failed unexpectedly with no exception recorded") + + +# Lifecycle helpers to integrate with server startup/shutdown +async def init_http_clients() -> None: + await _ensure_async_client() + + +async def close_http_clients() -> None: + global _async_client + if _async_client is not None: + try: + await _async_client.aclose() + finally: + _async_client = None + + +_STOPWORDS = { + "the", "a", "an", "and", "or", "vs", "vs.", "to", "for", "of", "on", "in", "with", + "is", "are", "be", "by", "from", "about", "between", +} + + +def normalize_text(value: str) -> str: + return re.sub(r"\s+", " ", re.sub(r"[^\w\s]", " ", value or "")).strip().lower() + + +def tokenize(value: str) -> list[str]: + norm = normalize_text(value) + tokens = [t for t in norm.split() if t and t not in _STOPWORDS] + return tokens + + +def generate_variants(tokens: list[str]) -> list[str]: + # Create simple variants like joined tokens e.g., ["claude", "code"] -> "claudecode" + variants: list[str] = [] + if len(tokens) >= 2: + variants.append("".join(tokens)) + variants.append("-".join(tokens)) + # Add common brand variants heuristically + joined = " ".join(tokens) + if "cursor" in joined and "ai" in joined and "cursorai" not in variants: + variants.append("cursorai") + variants.append("cursor ai") + if "claude" in joined and "code" in joined and "claudecode" not in variants: + variants.append("claudecode") + variants.append("claude code") + return [v for v in variants if v] + + +def build_broad_query(query: str) -> str: + """Build a broader Reddit search query using OR groups and variants. + + If the query looks like a comparison (contains 'vs'), we split into two + groups and AND them to emphasize posts mentioning both sides. + """ + text = normalize_text(query) + parts = re.split(r"\bvs\.?\b", text) + groups: list[list[str]] = [] + for part in parts: + toks = tokenize(part) + if not toks: + continue + variants = generate_variants(toks) + # Group as OR terms inside parentheses + or_terms = toks + variants + # Escape quotes inside each term for Reddit search + group = [f'"{term}"' if " " in term else term for term in or_terms] + groups.append(group) + + if not groups: + return text + + if len(groups) == 1: + return "(" + " OR ".join(groups[0]) + ")" + # Multiple groups -> AND them + return " AND ".join("(" + " OR ".join(g) + ")" for g in groups) + + +def compute_semantic_score(query: str, title: str, selftext: str) -> float: + """Score semantic relatedness via token overlap with light heuristics. + + Higher weight to title matches; small bonus if both sides of a 'vs' query appear. + """ + query_tokens = set(tokenize(query)) + title_tokens = set(tokenize(title)) + body_tokens = set(tokenize(selftext)) + + if not query_tokens: + return 0.0 + + title_overlap = len(query_tokens & title_tokens) + body_overlap = len(query_tokens & body_tokens) + + score = title_overlap * 2.0 + body_overlap * 1.0 + + # If it is a comparison query, ensure both sides appear + parts = re.split(r"\bvs\.?\b", normalize_text(query)) + if len(parts) >= 2: + side_scores = [] + for p in parts[:2]: + toks = set(tokenize(p)) + side_scores.append(len(toks & (title_tokens | body_tokens)) > 0) + if all(side_scores): + score += 2.0 + + return float(score) + + + diff --git a/mcp_servers/reddit_search/tools/create_comment.py b/mcp_servers/reddit_search/tools/create_comment.py new file mode 100644 index 00000000..9b7d5316 --- /dev/null +++ b/mcp_servers/reddit_search/tools/create_comment.py @@ -0,0 +1,54 @@ +import logging +from typing import Dict, Any + +from .base import reddit_post_as_user + +logger = logging.getLogger(__name__) + +async def create_comment(post_id: str, text: str) -> Dict[str, Any]: + """ + Creates a new comment on a given Reddit post. + + Args: + post_id: The ID of the post to comment on (without the 't3_' prefix). + text: The Markdown content of the comment. + """ + post_id = post_id.strip() + text = text.strip() + + if not post_id or not text: + error_msg = "post_id and text are required" + logger.error(error_msg) + return {"success": False, "error": error_msg} + + try: + logger.info(f"Creating comment on post {post_id}") + + payload = { + "api_type": "json", + "thing_id": f"t3_{post_id}", + "text": text, + } + + response = await reddit_post_as_user("/api/comment", data=payload) + + if response.get("json", {}).get("errors"): + errors = response["json"]["errors"] + error_msg = ", ".join([f"{err[0]}: {err[1]}" for err in errors]) + logger.error(f"Failed to create comment on post {post_id}: {error_msg}") + return {"success": False, "error": error_msg} + + # A successful comment creation returns a data structure with the new comment + data = response.get("json", {}).get("data", {}) + if data and data.get("things"): + comment_id = data["things"][0]["data"]["id"] + logger.info(f"Successfully created comment {comment_id} on post {post_id}") + return {"success": True, "comment_id": comment_id} + + logger.error(f"Comment creation on {post_id} seemed to succeed but API response was unexpected.") + return {"success": False, "error": "API did not return expected comment data."} + + except Exception as e: + error_msg = f"Failed to create comment on post {post_id}: {str(e)}" + logger.exception(error_msg) + return {"success": False, "error": error_msg} diff --git a/mcp_servers/reddit_search/tools/create_post.py b/mcp_servers/reddit_search/tools/create_post.py new file mode 100644 index 00000000..bd2f75ad --- /dev/null +++ b/mcp_servers/reddit_search/tools/create_post.py @@ -0,0 +1,75 @@ +import logging +from typing import Dict, Any + +from .base import reddit_post_as_user + +logger = logging.getLogger(__name__) + +async def create_post(subreddit: str, title: str, text: str, **kwargs) -> Dict[str, Any]: + """Write a text post to a subreddit using httpx. + + Args: + subreddit: The subreddit to write the post to (without r/ prefix). + title: The title of the post. + text: The text content of the post. + + Returns: + A dictionary containing success status, post ID, URL, and any error messages. + """ + # Clean and validate inputs + subreddit = subreddit.strip().strip("'\"") + subreddit = subreddit.removeprefix("r/") if subreddit.lower().startswith("r/") else subreddit + title = title.strip() + text = text.strip() + + if not subreddit or not title: + error_msg = "Subreddit and title are required" + logger.error(error_msg) + return {"success": False, "error": error_msg} + + # Validate title length (Reddit limit: 300 characters) + if len(title) > 300: + error_msg = f"Title too long: {len(title)} characters (max 300)" + logger.error(error_msg) + return {"success": False, "error": error_msg} + + try: + logger.info(f"Creating post in r/{subreddit} with title: '{title[:50]}...'") + + payload = { + "api_type": "json", + "kind": "self", + "sr": subreddit, + "title": title, + "text": text, + } + + response = await reddit_post_as_user("/api/submit", data=payload) + + # The Reddit API returns a complex object. We check for success. + if response.get("json", {}).get("errors"): + errors = response["json"]["errors"] + error_msg = ", ".join([f"{err[0]}: {err[1]}" for err in errors]) + logger.error(f"Failed to create post in r/{subreddit}: {error_msg}") + return {"success": False, "error": error_msg} + + # If successful, extract the post info + data = response.get("json", {}).get("data", {}) + post_id = data.get("id") + post_url = data.get("url") + + if not post_id or not post_url: + logger.error(f"Post creation in r/{subreddit} seemed to succeed but no post ID/URL was returned.") + return {"success": False, "error": "API did not return post ID or URL."} + + logger.info(f"Successfully created post {post_id} in r/{subreddit}") + return { + "success": True, + "post_id": post_id, + "url": post_url + } + + except Exception as e: + error_msg = f"Failed to create post in r/{subreddit}: {str(e)}" + logger.exception(error_msg) + return {"success": False, "error": error_msg} \ No newline at end of file diff --git a/mcp_servers/reddit_search/tools/find_similar.py b/mcp_servers/reddit_search/tools/find_similar.py new file mode 100644 index 00000000..2133d4be --- /dev/null +++ b/mcp_servers/reddit_search/tools/find_similar.py @@ -0,0 +1,100 @@ +import logging +from typing import List + +from .base import reddit_get, build_broad_query, compute_semantic_score +from .search_posts import PostInfo + +logger = logging.getLogger(__name__) + + +async def find_similar_posts_reddit(post_id: str, limit: int = 10) -> List[PostInfo]: + """Find posts similar to the given post using its title as the query. + + The search combines subreddit-restricted results and site-wide results, + ranks by semantic similarity and Reddit score, and returns up to `limit`. + """ + # 1) Resolve the post to get its title and subreddit quickly + info = await reddit_get("/api/info", params={"id": f"t3_{post_id}"}) + children = info.get("data", {}).get("children", []) + if not children: + raise ValueError(f"Post with ID '{post_id}' not found") + post_data = children[0].get("data", {}) + title = post_data.get("title", "") + subreddit = post_data.get("subreddit", "") + + if not title: + raise ValueError("Unable to resolve title for the given post") + + broad_q = build_broad_query(title) + + scored: list[tuple[float, PostInfo]] = [] + + # 2) Search in the same subreddit first (if available) + try: + if subreddit: + params_sr = { + "q": broad_q, + "limit": min(max(limit, 5), 50), + "type": "link", + "sort": "relevance", + "restrict_sr": 1, + "subreddit": subreddit, + } + data_sr = await reddit_get("/search", params=params_sr) + for child in data_sr.get("data", {}).get("children", []): + pd = child.get("data", {}) + if pd.get("id") == post_id: + continue # skip the same post + pi: PostInfo = PostInfo( + id=pd.get("id", ""), + subreddit=pd.get("subreddit", subreddit), + title=pd.get("title", ""), + score=int(pd.get("score", 0)), + url=pd.get("url", ""), + comment_count=int(pd.get("num_comments", 0)), + ) + sem = compute_semantic_score(title, pi["title"], pd.get("selftext", "")) + if sem > 0: + scored.append((sem, pi)) + except Exception as exc: + logger.warning(f"Subreddit-scope similar search failed: {exc}") + + # 3) Site-wide search for broader recall + try: + params_all = { + "q": broad_q, + "limit": min(50, max(limit, 10)), + "type": "link", + "sort": "relevance", + } + data_all = await reddit_get("/search", params=params_all) + for child in data_all.get("data", {}).get("children", []): + pd = child.get("data", {}) + if pd.get("id") == post_id: + continue + pi: PostInfo = PostInfo( + id=pd.get("id", ""), + subreddit=pd.get("subreddit", subreddit or pd.get("subreddit", "")), + title=pd.get("title", ""), + score=int(pd.get("score", 0)), + url=pd.get("url", ""), + comment_count=int(pd.get("num_comments", 0)), + ) + sem = compute_semantic_score(title, pi["title"], pd.get("selftext", "")) + if sem > 0: + scored.append((sem, pi)) + except Exception as exc: + logger.warning(f"Site-wide similar search failed: {exc}") + + # 4) Rank by semantic score then Reddit score; dedupe by id + seen: set[str] = set() + results: list[PostInfo] = [] + for s, pi in sorted(scored, key=lambda x: (x[0], x[1]["score"]), reverse=True): + if not pi["id"] or pi["id"] in seen: + continue + seen.add(pi["id"]) + results.append(pi) + if len(results) >= limit: + break + + return results \ No newline at end of file diff --git a/mcp_servers/reddit_search/tools/find_subreddits.py b/mcp_servers/reddit_search/tools/find_subreddits.py new file mode 100644 index 00000000..75480bcc --- /dev/null +++ b/mcp_servers/reddit_search/tools/find_subreddits.py @@ -0,0 +1,31 @@ +import logging +from typing import List, TypedDict + +from .base import reddit_get + +logger = logging.getLogger(__name__) + +class SubredditInfo(TypedDict): + """Structured data for a single subreddit.""" + name: str + subscriber_count: int + description: str + +async def find_relevant_subreddits(query: str) -> List[SubredditInfo]: + """Find subreddits that are relevant to the query and clean up the data.""" + params = {"q": query, "limit": 10, "type": "sr"} + + logger.info(f"Making API call to Reddit to find subreddits for query: '{query}'") + data = await reddit_get("/subreddits/search", params=params) + # Reddit API returns data in listing format: {"data": {"children": [...]}} + subreddits = data["data"]["children"] + + # We loop through the raw results and build a clean list of our SubredditInfo objects. + return [ + SubredditInfo( + name=sub["data"]["display_name"], + subscriber_count=sub["data"]["subscribers"], + description=sub["data"].get("public_description", "No description provided."), + ) + for sub in subreddits + ] \ No newline at end of file diff --git a/mcp_servers/reddit_search/tools/get_comments.py b/mcp_servers/reddit_search/tools/get_comments.py new file mode 100644 index 00000000..69390631 --- /dev/null +++ b/mcp_servers/reddit_search/tools/get_comments.py @@ -0,0 +1,59 @@ +import logging +from typing import List, TypedDict + +from .base import reddit_get + +logger = logging.getLogger(__name__) + +class CommentInfo(TypedDict): + """Structured data for a single comment.""" + author: str + text: str + score: int + +class PostDetails(TypedDict): + """The combined structure for a post and its top comments.""" + title: str + author: str + text: str + score: int + top_comments: List[CommentInfo] + +async def get_post_and_top_comments(post_id: str, subreddit: str) -> PostDetails: + """Gets post and comment details via the Reddit API and cleans the data.""" + params = {"limit": 3, "sort": "top", "raw_json": 1} + + logger.info(f"Making API call to Reddit for comments on post '{post_id}' in subreddit '{subreddit}'") + # Use the comments endpoint directly - this is the correct Reddit API pattern + data = await reddit_get(f"/comments/{post_id}", params=params) + + # Reddit returns an array: [post_listing, comments_listing] + # First element contains the post data, second contains comments + if len(data) < 2: + raise ValueError("Invalid response structure from Reddit API") + + post_listing = data[0]["data"]["children"] + comments_listing = data[1]["data"]["children"] + + if not post_listing: + raise ValueError(f"Post with ID '{post_id}' not found") + + post_data = post_listing[0]["data"] + + # Here we assemble our final, nested PostDetails object from the raw API data. + return PostDetails( + title=post_data["title"], + author=post_data["author"], + text=post_data.get("selftext", "[This post has no text content]"), + score=post_data["score"], + top_comments=[ + CommentInfo( + author=comment["data"].get("author", "[deleted]"), + text=comment["data"].get("body", ""), + score=comment["data"].get("score", 0), + ) + # We add a small check to filter out any empty or deleted comments. + for comment in comments_listing + if comment.get("data", {}).get("body") and comment["data"].get("author") != "[deleted]" + ], + ) \ No newline at end of file diff --git a/mcp_servers/reddit_search/tools/get_user_posts.py b/mcp_servers/reddit_search/tools/get_user_posts.py new file mode 100644 index 00000000..66feab44 --- /dev/null +++ b/mcp_servers/reddit_search/tools/get_user_posts.py @@ -0,0 +1,34 @@ +import logging +from typing import List + +from .base import reddit_get_as_user +from .search_posts import PostInfo + +logger = logging.getLogger(__name__) + +async def get_user_posts(limit: int = 25) -> List[PostInfo]: + """ + Fetches the most recent posts submitted by the authenticated user. + + Args: + limit: The maximum number of posts to return (default: 25, max: 100). + """ + limit = max(1, min(100, limit)) + params = {"limit": limit} + + logger.info(f"Making API call to Reddit to get user submitted posts with limit: {limit}") + data = await reddit_get_as_user("/api/v1/me/submitted", params=params) + + posts = data.get("data", {}).get("children", []) + + return [ + PostInfo( + id=post["data"].get("id", ""), + subreddit=post["data"].get("subreddit", ""), + title=post["data"].get("title", ""), + score=int(post["data"].get("score", 0)), + url=post["data"].get("url", ""), + comment_count=int(post["data"].get("num_comments", 0)), + ) + for post in posts + ] diff --git a/mcp_servers/reddit_search/tools/search_posts.py b/mcp_servers/reddit_search/tools/search_posts.py new file mode 100644 index 00000000..3dfac435 --- /dev/null +++ b/mcp_servers/reddit_search/tools/search_posts.py @@ -0,0 +1,96 @@ +import logging +from typing import List, TypedDict + +from .base import reddit_get, build_broad_query, compute_semantic_score + +logger = logging.getLogger(__name__) + +class PostInfo(TypedDict): + """Structured data for a Reddit post summary.""" + id: str + subreddit: str + title: str + score: int + url: str + comment_count: int + +async def search_subreddit_posts(subreddit: str, query: str) -> List[PostInfo]: + """Search for posts in a subreddit with semantic-style matching. + + Strategy: + - Build a broad Reddit search query (boolean OR groups, detect comparisons) + - Use Reddit's /search with restrict_sr=1 for recall + - Rank locally with token-overlap semantic score + Reddit score fallback + - Fall back to listing endpoints if search yields too few results + """ + # Clean inputs + subreddit = subreddit.strip().strip("'\"") + subreddit = subreddit.removeprefix("r/") if subreddit.lower().startswith("r/") else subreddit + query = query.strip().strip("'\"") + + # Build a broad query string + broad_q = build_broad_query(query) + + matching_posts: list[PostInfo] = [] + scored: list[tuple[float, PostInfo]] = [] + + # 1) Try Reddit's search API scoped to subreddit + try: + params = { + "q": broad_q, + "limit": 50, + "type": "link", + "sort": "relevance", + "restrict_sr": 1, + } + data = await reddit_get("/search", params={**params, "sr_detail": False, "subreddit": subreddit}) + posts = data.get("data", {}).get("children", []) + for post in posts: + pd = post.get("data", {}) + pi: PostInfo = PostInfo( + id=pd.get("id", ""), + subreddit=pd.get("subreddit", subreddit), + title=pd.get("title", ""), + score=int(pd.get("score", 0)), + url=pd.get("url", ""), + comment_count=int(pd.get("num_comments", 0)), + ) + sem_score = compute_semantic_score(query, pi["title"], pd.get("selftext", "")) + scored.append((sem_score, pi)) + except Exception as exc: + logger.warning(f"Subreddit search failed, will try listing-based fallback. Error: {exc}") + + # 2) Fallback to listing endpoints and local filtering if needed + if len(scored) < 10: + try: + hot = (await reddit_get(f"/r/{subreddit}/hot", params={"limit": 25})).get("data", {}).get("children", []) + top = (await reddit_get(f"/r/{subreddit}/top", params={"limit": 25, "t": "month"})).get("data", {}).get("children", []) + for post in hot + top: + pd = post.get("data", {}) + pi: PostInfo = PostInfo( + id=pd.get("id", ""), + subreddit=pd.get("subreddit", subreddit), + title=pd.get("title", ""), + score=int(pd.get("score", 0)), + url=pd.get("url", ""), + comment_count=int(pd.get("num_comments", 0)), + ) + sem_score = compute_semantic_score(query, pi["title"], pd.get("selftext", "")) + if sem_score > 0: + scored.append((sem_score, pi)) + except Exception as exc: + logger.warning(f"Listing fallback failed: {exc}") + + # 3) Rank: semantic score first, then Reddit score + # Deduplicate by id + seen: set[str] = set() + for s, pi in sorted(scored, key=lambda x: (x[0], x[1]["score"]), + reverse=True): + if not pi["id"] or pi["id"] in seen: + continue + seen.add(pi["id"]) + matching_posts.append(pi) + if len(matching_posts) >= 10: + break + + return matching_posts \ No newline at end of file diff --git a/mcp_servers/reddit_search/tools/upvote.py b/mcp_servers/reddit_search/tools/upvote.py new file mode 100644 index 00000000..2c2cfe03 --- /dev/null +++ b/mcp_servers/reddit_search/tools/upvote.py @@ -0,0 +1,38 @@ +import logging +from typing import Dict, Any + +from .base import reddit_post_as_user + +logger = logging.getLogger(__name__) + +async def upvote(post_id: str) -> Dict[str, Any]: + """ + Upvotes a post. + + Args: + post_id: The ID of the post to upvote (without the 't3_' prefix). + """ + post_id = post_id.strip() + if not post_id: + return {"success": False, "error": "post_id is required"} + + try: + logger.info(f"Upvoting post {post_id}") + + payload = { + "id": f"t3_{post_id}", + "dir": 1, # 1 for upvote + "api_type": "json", + } + + # The vote API returns an empty JSON {} on success + await reddit_post_as_user("/api/vote", data=payload) + + logger.info(f"Successfully upvoted post {post_id}") + return {"success": True, "post_id": post_id} + + except Exception as e: + # The API might return errors in a non-200 response, which httpx will raise + error_msg = f"Failed to upvote post {post_id}: {str(e)}" + logger.exception(error_msg) + return {"success": False, "error": error_msg} diff --git a/mcp_servers/report_generation/README.md b/mcp_servers/report_generation/README.md index a3a93870..0a93160e 100644 --- a/mcp_servers/report_generation/README.md +++ b/mcp_servers/report_generation/README.md @@ -1,129 +1,73 @@ -# Klavis ReportGen MCP Server +# Report Generation MCP Server -This directory contains the code for the Klavis Report Generation Model Context Protocol (MCP) server. This server allows users to generate visually appealing web reports based on a simple search query. It leverages AI to find relevant information, synthesize it, and present it in a modern, interactive HTML format. +A Model Context Protocol (MCP) server for automated report generation. Create professional reports and documents using AI-powered report generation capabilities. -## Features +## šŸš€ Quick Start - Run in 30 Seconds -* **AI-Powered Report Generation**: Uses Anthropic's Claude model to create comprehensive and well-structured reports. -* **Web Search Integration**: Utilizes the Firecrawl API to gather up-to-date information from the web based on your query. -* **Database Storage**: Stores generated reports in a Supabase database for easy access and retrieval. -* **MCP Compliant**: Built using the Klavis FastMCP framework for seamless integration with MCP-compatible clients. -* **Easy Deployment**: Can be run easily using Docker or a standard Python environment. +### 🌐 Using Hosted Service (Recommended for Production) -## Getting Started +Get instant access to Report Generation with our managed infrastructure - **no setup required**: -There are two primary ways to run the Klavis ReportGen MCP server locally: using Docker (recommended) or setting up a Python virtual environment. +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** -### Prerequisites - -Regardless of the method you choose, you will need to configure environment variables. - -1. Copy the example environment file: - ```bash - cp .env.example .env - ``` -2. Edit the `.env` file and add your API keys and Supabase credentials: - * `ANTHROPIC_API_KEY`: Your API key for the Anthropic API. - * `SUPABASE_URL`: Your Supabase project URL. - * `SUPABASE_API_KEY`: Your Supabase service role key or anon key (depending on your table policies). - * `FIRECRAWL_API_KEY`: Your API key for the Firecrawl API. - -### Option 1: Running with Docker (Recommended) - -Docker provides a containerized environment with all dependencies included. - -1. **Build the Docker image:** - Navigate to the root directory of the Klavis project (the directory containing the `mcp_servers` folder) and run the following command: - ```bash - docker build -t klavis-reportgen-mcp -f mcp_servers/report_generation/Dockerfile . - ``` - *Note: The `-t klavis-reportgen-mcp` tags the image with a descriptive name. The `-f` flag specifies the path to the Dockerfile, and `.` indicates the build context (the Klavis project root).* - -2. **Run the Docker container:** - Make sure your `.env` file is present in the `mcp_servers/report_generation` directory. The Dockerfile is configured to copy this file into the image. - ```bash - docker run -p 5000:5000 --env-file mcp_servers/report_generation/.env klavis-reportgen-mcp - ``` - * `-p 5000:5000` maps port 5000 on your host machine to port 5000 in the container. - * `--env-file mcp_servers/report_generation/.env` loads the environment variables from your `.env` file into the container. - -The server should now be running and accessible at `http://localhost:5000`. - -### Option 2: Running with Python Virtual Environment +```bash +pip install klavis +# or +npm install klavis +``` -If you prefer not to use Docker, you can run the server directly using Python. +```python +from klavis import Klavis -1. **Navigate to the server directory:** - ```bash - cd mcp_servers/report_generation - ``` +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("REPORT_GENERATION", "user123") +``` -2. **Create and activate a virtual environment:** - ```bash - python -m venv venv - # On Windows: - # .\venv\Scripts\activate - # On macOS/Linux: - # source venv/bin/activate - ``` +### 🐳 Using Docker (For Self-Hosting) -3. **Install dependencies:** - ```bash - pip install -r requirements.txt - ``` +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/report-generation-mcp-server:latest -4. **Ensure `.env` file is present:** - Make sure the `.env` file you created earlier is in the current directory (`mcp_servers/report_generation`). The server loads variables from this file automatically. -5. **Run the server:** - ```bash - python server.py - ``` +# Run Report Generation MCP Server +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/report-generation-mcp-server:latest +``` -The server should now be running and accessible at `http://localhost:5000`. +**Setup:** Uses your Klavis API key for AI-powered report generation capabilities. -## Usage +## šŸ› ļø Available Tools -Once the server is running, you can interact with it using any MCP-compatible client by connecting to `http://localhost:5000`. The available tool is `generate_web_reports`, which accepts a `query` string as input. +- **Report Creation**: Generate professional reports from data and templates +- **Template Management**: Use and customize report templates +- **Data Integration**: Integrate data from various sources into reports +- **Format Options**: Export reports in multiple formats (PDF, Word, HTML) +- **Automated Insights**: Generate AI-powered insights and analysis -Example interaction (using a hypothetical MCP client): +## šŸ“š Documentation & Support -```python -# Connect to the server -client = MCPClient("http://localhost:5000/") +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | -# Call the report generation tool -result = client.call_tool("generate_web_reports", query="latest advancements in large language models") +## šŸ¤ Contributing -# Print the result (which should contain a URL to the report) -print(result) -# Expected Output: "Report generated successfully. Please return the url to the user so that they can view the report at http://www.klavis.ai/generated-reports/{report_id}" -``` +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. -## License +## šŸ“œ License -This project is licensed under the MIT License. +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. --- -MIT License - -Copyright (c) 2024 Klavis AI - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/report_generation/requirements.txt b/mcp_servers/report_generation/requirements.txt index 26a8368d..a2782ce4 100644 --- a/mcp_servers/report_generation/requirements.txt +++ b/mcp_servers/report_generation/requirements.txt @@ -3,5 +3,5 @@ httpx>=0.27.0 supabase>=2.3.0 python-dotenv>=1.0.0 asyncio>=3.4.3 -mcp>=1.6.0 +mcp==1.11.0 requests>=2.31.0 \ No newline at end of file diff --git a/mcp_servers/report_generation/server.py b/mcp_servers/report_generation/server.py index 9aa96158..62099eb5 100644 --- a/mcp_servers/report_generation/server.py +++ b/mcp_servers/report_generation/server.py @@ -1,29 +1,47 @@ -import os +import contextlib import logging +import os +from collections.abc import AsyncIterator from typing import List, Dict, Any, Optional, Annotated +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv import anthropic import requests from supabase import create_client, Client -from mcp.server.fastmcp import FastMCP from pydantic import Field # Configure logging -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger("report_generation") +logger = logging.getLogger(__name__) + +load_dotenv() -# Load environment variables +# Configuration ANTHROPIC_API_KEY = os.environ.get("ANTHROPIC_API_KEY") +if not ANTHROPIC_API_KEY: + raise ValueError("ANTHROPIC_API_KEY environment variable is required") + SUPABASE_URL = os.environ.get("SUPABASE_URL") +if not SUPABASE_URL: + raise ValueError("SUPABASE_URL environment variable is required") + SUPABASE_API_KEY = os.environ.get("SUPABASE_API_KEY") +if not SUPABASE_API_KEY: + raise ValueError("SUPABASE_API_KEY environment variable is required") + FIRECRAWL_API_KEY = os.environ.get("FIRECRAWL_API_KEY") +if not FIRECRAWL_API_KEY: + raise ValueError("FIRECRAWL_API_KEY environment variable is required") -# Initialize MCP Server -mcp = FastMCP( - "KlavisReportGen", - instructions="Generate visually appealing JavaScript web reports from search queries with Klavis AI.", - port=5000, -) +REPORT_GENERATION_MCP_SERVER_PORT = int(os.getenv("REPORT_GENERATION_MCP_SERVER_PORT", "5000")) # Report generation prompt template REPORT_GENERATION_PROMPT = """You are a world class web report generation agent build by Klavis AI. @@ -54,7 +72,6 @@ {article_content} """ - async def generate_report_with_claude(article_content: str) -> str: """ Generate web report using Claude 3.7 @@ -98,7 +115,6 @@ async def generate_report_with_claude(article_content: str) -> str: logger.error(f"Error generating report: {str(e)}") raise - async def store_report_in_supabase(html_content: str) -> Dict[str, Any]: """ Store the generated report in Supabase @@ -132,7 +148,6 @@ async def store_report_in_supabase(html_content: str) -> Dict[str, Any]: logger.error(f"Error storing report: {str(e)}") raise - def search_firecrawl(query: str, limit: int = 3) -> List[str]: """ Search for URLs using Firecrawl search API @@ -182,8 +197,6 @@ def search_firecrawl(query: str, limit: int = 3) -> List[str]: logger.error(f"Error in search_firecrawl: {str(e)}") raise - -@mcp.tool() async def generate_web_reports( query: Annotated[ str, @@ -199,6 +212,8 @@ async def generate_web_reports( A URL to the generated report. The url will be http://www.klavis.ai/generated-reports/{report_id}. """ try: + logger.info(f"Executing tool: generate_web_reports with query: {query}") + # 1. Search for relevant URLs url_to_content = search_firecrawl(query) @@ -206,21 +221,164 @@ async def generate_web_reports( for url, content in url_to_content.items(): all_articles += f"Article URL: {url}\nArticle Title: {content['title']}\nArticle Content: {content['content']}\n\n" - # 3. Generate reports + # 2. Generate report generated_code = await generate_report_with_claude(all_articles) - # 4. Store in Supabase + # 3. Store in Supabase report_id = await store_report_in_supabase(generated_code) return f"Report generated successfully. Please return the url to the user so that they can view the report at http://www.klavis.ai/generated-reports/{report_id}" except Exception as e: logger.error(f"Error in generate_web_reports: {str(e)}") - return {"success": False, "error": str(e)} - - -def main(): - mcp.run(transport="sse") - + return f"Error generating report: {str(e)}" + +@click.command() +@click.option("--port", default=REPORT_GENERATION_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("report-generation-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="generate_web_reports", + description="Generate web reports based on a search query. Searches for relevant articles using the query, creates a visually appealing JavaScript web page that consolidates content from the top search results.", + inputSchema={ + "type": "object", + "required": ["query"], + "properties": { + "query": { + "type": "string", + "description": "Search query to find relevant articles for the report.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "REPORT_GENERATION_REPORT"}), + ) + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + ctx = app.request_context + + if name == "generate_web_reports": + query = arguments.get("query") + if not query: + return [ + types.TextContent( + type="text", + text="Error: query parameter is required", + ) + ] + + try: + result = await generate_web_reports(query) + return [ + types.TextContent( + type="text", + text=result, + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + await session_manager.handle_request(scope, receive, send) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 if __name__ == "__main__": main() diff --git a/mcp_servers/resend/.eslintrc.json b/mcp_servers/resend/.eslintrc.json new file mode 100644 index 00000000..6a1b1376 --- /dev/null +++ b/mcp_servers/resend/.eslintrc.json @@ -0,0 +1,11 @@ +{ + "root": false, + "extends": [ + "../.eslintrc.js" + ], + "parserOptions": { + "tsconfigRootDir": ".", + "project": "./tsconfig.json" + }, + "rules": {} +} \ No newline at end of file diff --git a/mcp_servers/resend/Dockerfile b/mcp_servers/resend/Dockerfile index e6cec65a..5a14a1d2 100644 --- a/mcp_servers/resend/Dockerfile +++ b/mcp_servers/resend/Dockerfile @@ -1,11 +1,18 @@ FROM node:22.12-alpine AS builder -COPY mcp_servers/resend /app - +# Set the working directory inside the container WORKDIR /app -RUN --mount=type=cache,target=/root/.npm npm install +# Copy package.json and package-lock.json to install dependencies +COPY mcp_servers/resend/package.json mcp_servers/resend/package-lock.json ./ + +# Install dependencies (ignoring scripts to prevent running the prepare script) +RUN npm install --ignore-scripts + +# Copy the rest of the application source code +COPY mcp_servers/resend . +# Build the application using TypeScript RUN npm run build FROM node:22-alpine AS release diff --git a/mcp_servers/resend/README.md b/mcp_servers/resend/README.md index c3dc673f..035632b9 100644 --- a/mcp_servers/resend/README.md +++ b/mcp_servers/resend/README.md @@ -1,114 +1,73 @@ -# Resend Email Sending MCP Server šŸ’Œ - -This project provides a Model Context Protocol (MCP) server designed to send emails using the [Resend API](https://resend.com/). It allows AI agents like Cursor or Claude Desktop assistants to compose and send emails directly, streamlining your workflow. - -This server is derived from the original [resend/mcp-send-email](https://github.com/resend/mcp-send-email) repository and is intended for open-source distribution under the MIT License. +# Resend MCP Server -## Features +A Model Context Protocol (MCP) server for Resend email service integration. Send transactional emails with attachments and rich formatting using Resend's API. -* Send plain text and HTML emails via Resend. -* Schedule emails for future delivery. -* Add CC and BCC recipients. -* Configure reply-to addresses. -* Requires sender email verification through Resend. - -## Prerequisites - -* **Resend Account & API Key:** You need a [Resend account](https://resend.com/) and an API key. -* **Verified Domain/Email:** You must authorize Resend to send emails from your domain or specific email address. Follow the [Resend domain verification guide](https://resend.com/docs/introduction/getting-started/authentication#verify-your-domain). - -## Setup and Running - -There are two primary ways to run this MCP server: using Docker (recommended) or running it locally with Node.js. - -### 1. Using Docker (Recommended) - -This method encapsulates the server and its dependencies in a container. - -1. **Build the Docker Image:** - Navigate to the root directory of the `klavis` project (the parent directory of `mcp_servers`) in your terminal and run: - ```bash - docker build -t resend-mcp -f mcp_servers/resend/Dockerfile . - ``` - -2. **Run the Docker Container:** - Replace `YOUR_RESEND_API_KEY` with your actual Resend API key. - ```bash - docker run -d -p 5000:5000 -e RESEND_API_KEY=YOUR_RESEND_API_KEY --name resend-mcp-server resend-mcp - ``` - * `-d`: Runs the container in detached mode (in the background). - * `-p 5000:5000`: Maps port 5000 on your host machine to port 5000 inside the container (the server listens on port 5000). - * `-e RESEND_API_KEY=...`: Passes your Resend API key as an environment variable to the container. - * `--name resend-mcp-server`: Assigns a convenient name to the container. - * `resend-mcp`: The name of the image you built. - - The server is now running and accessible at `http://localhost:5000`. - -### 2. Running Locally with Node.js - -This method requires Node.js and npm installed on your system. - -1. **Install Dependencies:** - Navigate to the `mcp_servers/resend` directory in your terminal: - ```bash - cd mcp_servers/resend - npm install - ``` - -2. **Configure Environment Variables:** - Copy the example environment file: - ```bash - cp .env.example .env - ``` - Edit the newly created `.env` file and replace the placeholder with your actual Resend API key: - ``` - RESEND_API_KEY=YOUR_RESEND_API_KEY - ``` - -3. **Build the Server:** - Compile the TypeScript code to JavaScript: - ```bash - npm run build - ``` - This will create a `build` directory containing the compiled code (e.g., `build/index.js`). - -4. **Run the Server:** - Start the server: - ```bash - node build/index.js - ``` - The server will start, load the API key from the `.env` file, and listen on port 5000. You should see a message like `server running on port 5000`. - -## Connecting to Clients (e.g., Cursor) - -Once the server is running (either via Docker or locally), you need to configure your AI client (like Cursor) to use it. - -1. Go to your client's MCP server settings (e.g., in Cursor: Settings -> MCP -> Add new MCP server). -2. Configure the server connection: - * **Name:** Choose a descriptive name (e.g., "Resend Email Sender"). - * **Type:** Select "http" or "sse" (this server uses SSE - Server-Sent Events). - * **URL:** Enter the address where the server is running. - * If using Docker or running locally: `http://localhost:5000/sse` - * **Authentication:** This server expects the API key via the `RESEND_API_KEY` environment variable (handled by Docker `run -e` or the local `.env` file), *not* typically via MCP client headers. However, consult your specific client's documentation if it offers ways to pass environment variables or if you need to modify the server (`index.ts`) to accept keys via headers (though the environment variable method is generally preferred for security). - -3. **Usage:** - You can now instruct your AI agent to use the "send-email" tool provided by this server. For example, you could provide email details (to, from, subject, body) and ask the agent to "send this email using the Resend Email Sender". The agent will interact with your running MCP server to send the email via your Resend account. - - **Important Tool Arguments:** - * `from`: The *verified* sender email address associated with your Resend account. - * `to`: The recipient's email address. - * `subject`: The email subject. - * `text`: The plain text body of the email. - * `html` (optional): The HTML body of the email. - * `cc`, `bcc`, `replyTo`, `scheduledAt` (optional): Ask the user before using these. - -## Development - -To modify or contribute to the server: - -1. Clone the repository. -2. Navigate to `mcp_servers/resend`. -3. Install dependencies: `npm install` -4. Make your changes (primarily in `index.ts`). -5. Build the code: `npm run build` -6. Run locally using the Node.js instructions above. \ No newline at end of file +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Resend with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("RESEND", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/resend-mcp-server:latest + + +# Run Resend MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/resend-mcp-server:latest +``` + +**API Key Setup:** Get your Resend API key from the [Resend Dashboard](https://resend.com/api-keys). + +## šŸ› ļø Available Tools + +- **Send Emails**: Send transactional emails with HTML and text content +- **Email Templates**: Use and manage email templates +- **Attachments**: Send emails with file attachments +- **Bulk Sending**: Send emails to multiple recipients +- **Email Tracking**: Track delivery and engagement metrics + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/resend/index.ts b/mcp_servers/resend/index.ts index c0c9c118..76fa0a78 100644 --- a/mcp_servers/resend/index.ts +++ b/mcp_servers/resend/index.ts @@ -1,8 +1,9 @@ #!/usr/bin/env node -import express from "express"; +import express, { Request, Response } from 'express'; import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js"; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; import { z } from 'zod'; import { Resend } from "resend"; import { AsyncLocalStorage } from 'async_hooks'; @@ -13,146 +14,777 @@ dotenv.config(); // Create AsyncLocalStorage for request context const asyncLocalStorage = new AsyncLocalStorage<{ - resendClient: Resend; + apiKey: string; }>(); function getResendClient() { - return asyncLocalStorage.getStore()!.resendClient; + const store = asyncLocalStorage.getStore(); + if (!store) { + throw new Error('API key not found in AsyncLocalStorage'); + } + return new Resend(store.apiKey); } -// Create server instance -const server = new McpServer({ - name: "resend-email-service", - version: "1.0.0", -}); +const getResendMcpServer = () => { + // Create server instance + const server = new McpServer({ + name: "resend-email-service", + version: "1.0.0", + }); -server.tool( - "send-email", - "Send an email using Resend", - { - to: z.string().email().describe("Recipient email address"), - subject: z.string().describe("Email subject line"), - text: z.string().describe("Plain text email content"), - html: z - .string() - .optional() - .describe( - "HTML email content. When provided, the plain text argument MUST be provided as well." - ), - cc: z - .string() - .email() - .array() - .optional() - .describe("Optional array of CC email addresses. You MUST ask the user for this parameter. Under no circumstance provide it yourself"), - bcc: z - .string() - .email() - .array() - .optional() - .describe("Optional array of BCC email addresses. You MUST ask the user for this parameter. Under no circumstance provide it yourself"), - scheduledAt: z - .string() - .optional() - .describe( - "Optional parameter to schedule the email. This uses natural language. Examples would be 'tomorrow at 10am' or 'in 2 hours' or 'next day at 9am PST' or 'Friday at 3pm ET'." - ), - // If sender email address is not provided, the tool requires it as an argument - from: z - .string() - .email() - .nonempty() - .describe( - "Sender email address. You MUST ask the user for this parameter. Under no circumstance provide it yourself" - ), - replyTo: z - .string() - .email() - .array() - .optional() - .describe( - "Optional email addresses for the email readers to reply to. You MUST ask the user for this parameter. Under no circumstance provide it yourself" - ), - }, - async ({ from, to, subject, text, html, replyTo, scheduledAt, cc, bcc }) => { - const fromEmailAddress = from; - const replyToEmailAddresses = replyTo; - - // Type check on from, since "from" is optionally included in the arguments schema - // This should never happen. - if (typeof fromEmailAddress !== "string") { - throw new Error("from argument must be provided."); - } - - // Similar type check for "reply-to" email addresses. - if ( - typeof replyToEmailAddresses !== "string" && - !Array.isArray(replyToEmailAddresses) - ) { - throw new Error("replyTo argument must be provided."); - } - - console.error(`Debug - Sending email with from: ${fromEmailAddress}`); - - // Explicitly structure the request with all parameters to ensure they're passed correctly - const emailRequest: { - to: string; - subject: string; - text: string; - from: string; - replyTo: string | string[]; - html?: string; - scheduledAt?: string; - cc?: string[]; - bcc?: string[]; - } = { - to, - subject, - text, - from: fromEmailAddress, - replyTo: replyToEmailAddresses, - }; - - // Add optional parameters conditionally - if (html) { - emailRequest.html = html; - } - - if (scheduledAt) { - emailRequest.scheduledAt = scheduledAt; - } - - if (cc) { - emailRequest.cc = cc; - } - - if (bcc) { - emailRequest.bcc = bcc; - } - - console.error(`Email request: ${JSON.stringify(emailRequest)}`); - - const resend = getResendClient(); - const response = await resend.emails.send(emailRequest); - - if (response.error) { - throw new Error( - `Email failed to send: ${JSON.stringify(response.error)}` - ); - } - - return { - content: [ - { - type: "text", - text: `Email sent successfully! ${JSON.stringify(response.data)}`, - }, - ], - }; + server.tool( + "resend_send_email", + "Send an email using Resend", + { + to: z.string().email().describe("Recipient email address"), + subject: z.string().describe("Email subject line"), + text: z.string().describe("Plain text email content"), + html: z + .string() + .optional() + .describe( + "HTML email content. When provided, the plain text argument MUST be provided as well." + ), + cc: z + .string() + .email() + .array() + .optional() + .describe("Optional array of CC email addresses. You MUST ask the user for this parameter. Under no circumstance provide it yourself"), + bcc: z + .string() + .email() + .array() + .optional() + .describe("Optional array of BCC email addresses. You MUST ask the user for this parameter. Under no circumstance provide it yourself"), + scheduledAt: z + .string() + .optional() + .describe( + "Optional parameter to schedule the email. This uses natural language. Examples would be 'tomorrow at 10am' or 'in 2 hours' or 'next day at 9am PST' or 'Friday at 3pm ET'." + ), + // If sender email address is not provided, the tool requires it as an argument + from: z + .string() + .email() + .nonempty() + .describe( + "Sender email address. You MUST ask the user for this parameter. Under no circumstance provide it yourself" + ), + replyTo: z + .string() + .email() + .array() + .optional() + .describe( + "Optional email addresses for the email readers to reply to. You MUST ask the user for this parameter. Under no circumstance provide it yourself" + ), + }, + { category: "RESEND_EMAIL" }, + async ({ from, to, subject, text, html, replyTo, scheduledAt, cc, bcc }) => { + const fromEmailAddress = from; + const replyToEmailAddresses = replyTo; + + // Type check on from, since "from" is optionally included in the arguments schema + // This should never happen. + if (typeof fromEmailAddress !== "string") { + throw new Error("from argument must be provided."); + } + + console.error(`Debug - Sending email with from: ${fromEmailAddress}`); + + // Explicitly structure the request with all parameters to ensure they're passed correctly + const emailRequest: { + to: string; + subject: string; + text: string; + from: string; + replyTo?: string | string[]; + html?: string; + scheduledAt?: string; + cc?: string[]; + bcc?: string[]; + } = { + to, + subject, + text, + from: fromEmailAddress, + }; + + // Add optional parameters conditionally + if (replyToEmailAddresses) { + emailRequest.replyTo = replyToEmailAddresses; + } + + if (html) { + emailRequest.html = html; + } + + if (scheduledAt) { + emailRequest.scheduledAt = scheduledAt; + } + + if (cc) { + emailRequest.cc = cc; + } + + if (bcc) { + emailRequest.bcc = bcc; + } + + console.error(`Email request: ${JSON.stringify(emailRequest)}`); + + const resend = getResendClient(); + const response = await resend.emails.send(emailRequest); + + if (response.error) { + throw new Error( + `Email failed to send: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Email sent successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_create_audience", + "Create a new audience in Resend", + { + name: z.string().describe("Name of the audience to create"), + }, + { category: "RESEND_AUDIENCE" }, + async ({ name }) => { + const resend = getResendClient(); + const response = await resend.audiences.create({ name }); + + if (response.error) { + throw new Error( + `Failed to create audience: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Audience created successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_get_audience", + "Retrieve audience details by ID in Resend", + { + id: z.string().describe("ID of the audience to retrieve"), + }, + { category: "RESEND_AUDIENCE", readOnlyHint: true }, + async ({ id }) => { + const resend = getResendClient(); + const response = await resend.audiences.get(id); + + if (response.error) { + throw new Error( + `Failed to retrieve audience: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Audience retrieved successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_delete_audience", + "Delete an audience by ID in Resend", + { + id: z.string().describe("ID of the audience to delete"), + }, + { category: "RESEND_AUDIENCE" }, + async ({ id }) => { + const resend = getResendClient(); + const response = await resend.audiences.remove(id); + + if (response.error) { + throw new Error( + `Failed to delete audience: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Audience deleted successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_list_audiences", + "List all audiences in Resend", + {}, + { category: "RESEND_AUDIENCE", readOnlyHint: true }, + async () => { + const resend = getResendClient(); + const response = await resend.audiences.list(); + + if (response.error) { + throw new Error( + `Failed to list audiences: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Audiences retrieved successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_create_contact", + "Create a new contact in a Resend audience", + { + email: z.string().email().describe("Email address of the contact"), + audienceId: z.string().describe("ID of the audience to add the contact to"), + firstName: z.string().optional().describe("First name of the contact"), + lastName: z.string().optional().describe("Last name of the contact"), + unsubscribed: z.boolean().optional().describe("Whether the contact is unsubscribed"), + }, + { category: "RESEND_CONTACT" }, + async ({ email, audienceId, firstName, lastName, unsubscribed }) => { + const resend = getResendClient(); + const response = await resend.contacts.create({ + email, + audienceId, + firstName, + lastName, + unsubscribed, + }); + + if (response.error) { + throw new Error( + `Failed to create contact: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Contact created successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_get_contact", + "Retrieve a contact from a Resend audience by ID or email", + { + audienceId: z.string().describe("ID of the audience the contact belongs to"), + id: z.string().optional().describe("ID of the contact to retrieve"), + email: z.string().email().optional().describe("Email of the contact to retrieve"), + }, + { category: "RESEND_CONTACT", readOnlyHint: true }, + async ({ audienceId, id, email }) => { + if (!id && !email) { + throw new Error("Either contact ID or email must be provided"); + } + + const resend = getResendClient(); + let response: any = null; + + if (id) { + // Lookup by ID + response = await resend.contacts.get({ + id, + audienceId, + }); + } else if (email) { + // Based on the provided examples, we need to use different method or params for email lookup + // Let's try to find the contact by email in the list + const listResponse = await resend.contacts.list({ audienceId }); + + if (listResponse.error) { + throw new Error(`Failed to list contacts: ${JSON.stringify(listResponse.error)}`); + } + + const contactData = listResponse.data?.data?.find(contact => contact.email === email); + + if (!contactData) { + throw new Error(`Contact with email ${email} not found`); + } + + // Now get the full contact details by ID + response = await resend.contacts.get({ + id: contactData.id, + audienceId, + }); + } + + if (!response) { + throw new Error("Failed to retrieve contact"); + } + + if (response.error) { + throw new Error( + `Failed to retrieve contact: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Contact retrieved successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_update_contact", + "Update a contact in a Resend audience by ID or email", + { + audienceId: z.string().describe("ID of the audience the contact belongs to"), + id: z.string().optional().describe("ID of the contact to update"), + email: z.string().email().optional().describe("Email of the contact to update"), + firstName: z.string().optional().describe("Updated first name"), + lastName: z.string().optional().describe("Updated last name"), + unsubscribed: z.boolean().optional().describe("Updated unsubscribed status"), + }, + { category: "RESEND_CONTACT" }, + async ({ audienceId, id, email, firstName, lastName, unsubscribed }) => { + if (!id && !email) { + throw new Error("Either contact ID or email must be provided"); + } + + const resend = getResendClient(); + let response: any = null; + + // Prepare update data + const updateData: any = { + audienceId, + ...(firstName !== undefined ? { firstName } : {}), + ...(lastName !== undefined ? { lastName } : {}), + ...(unsubscribed !== undefined ? { unsubscribed } : {}) + }; + + if (id) { + // Update by ID + updateData.id = id; + response = await resend.contacts.update(updateData); + } else if (email) { + // First check if we need to find the ID for this email + const listResponse = await resend.contacts.list({ audienceId }); + + if (listResponse.error) { + throw new Error(`Failed to list contacts: ${JSON.stringify(listResponse.error)}`); + } + + const contactData = listResponse.data?.data?.find(contact => contact.email === email); + + if (!contactData) { + throw new Error(`Contact with email ${email} not found`); + } + + // Now update using the ID + updateData.id = contactData.id; + response = await resend.contacts.update(updateData); + } + + if (!response) { + throw new Error("Failed to update contact"); + } + + if (response.error) { + throw new Error( + `Failed to update contact: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Contact updated successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_delete_contact", + "Delete a contact from a Resend audience by ID or email", + { + audienceId: z.string().describe("ID of the audience the contact belongs to"), + id: z.string().optional().describe("ID of the contact to delete"), + email: z.string().email().optional().describe("Email of the contact to delete"), + }, + { category: "RESEND_CONTACT" }, + async ({ audienceId, id, email }) => { + if (!id && !email) { + throw new Error("Either contact ID or email must be provided"); + } + + const resend = getResendClient(); + let response: any = null; + + if (id) { + // Delete by ID + response = await resend.contacts.remove({ + id, + audienceId, + }); + } else if (email) { + // First check if we need to find the ID for this email + const listResponse = await resend.contacts.list({ audienceId }); + + if (listResponse.error) { + throw new Error(`Failed to list contacts: ${JSON.stringify(listResponse.error)}`); + } + + const contactData = listResponse.data?.data?.find(contact => contact.email === email); + + if (!contactData) { + throw new Error(`Contact with email ${email} not found`); + } + + // Now delete using the ID + response = await resend.contacts.remove({ + id: contactData.id, + audienceId, + }); + } + + if (!response) { + throw new Error("Failed to delete contact"); + } + + if (response.error) { + throw new Error( + `Failed to delete contact: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Contact deleted successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_list_contacts", + "List all contacts in a Resend audience", + { + audienceId: z.string().describe("ID of the audience to list contacts from"), + }, + { category: "RESEND_CONTACT", readOnlyHint: true }, + async ({ audienceId }) => { + const resend = getResendClient(); + const response = await resend.contacts.list({ + audienceId, + }); + + if (response.error) { + throw new Error( + `Failed to list contacts: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Contacts retrieved successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_create_broadcast", + "Create a new broadcast in Resend", + { + audienceId: z.string().describe("ID of the audience to send the broadcast to"), + from: z.string().describe("Sender email and name in the format 'Name '"), + subject: z.string().describe("Subject line of the broadcast"), + html: z.string().describe("HTML content of the broadcast. Can include variables like {{{FIRST_NAME|there}}} and {{{RESEND_UNSUBSCRIBE_URL}}}"), + name: z.string().optional().describe("Optional name for the broadcast"), + replyTo: z.string().optional().describe("Optional reply-to email address"), + previewText: z.string().optional().describe("Optional preview text that appears in email clients"), + }, + { category: "RESEND_BROADCAST" }, + async ({ audienceId, from, subject, html, name, replyTo, previewText }) => { + const resend = getResendClient(); + const response = await resend.broadcasts.create({ + audienceId, + from, + subject, + html, + ...(name && { name }), + ...(replyTo && { replyTo }), + ...(previewText && { previewText }), + }); + + if (response.error) { + throw new Error( + `Failed to create broadcast: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Broadcast created successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_get_broadcast", + "Retrieve a broadcast by ID from Resend", + { + id: z.string().describe("ID of the broadcast to retrieve"), + }, + { category: "RESEND_BROADCAST", readOnlyHint: true }, + async ({ id }) => { + const resend = getResendClient(); + const response = await resend.broadcasts.get(id); + + if (response.error) { + throw new Error( + `Failed to retrieve broadcast: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Broadcast retrieved successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_send_broadcast", + "Send or schedule a broadcast in Resend", + { + id: z.string().describe("ID of the broadcast to send"), + scheduledAt: z.string().optional().describe("Optional scheduling time in natural language (e.g., 'in 1 hour', 'tomorrow at 9am')"), + }, + { category: "RESEND_BROADCAST" }, + async ({ id, scheduledAt }) => { + const resend = getResendClient(); + + const sendOptions: any = {}; + if (scheduledAt) sendOptions.scheduledAt = scheduledAt; + + const response = await resend.broadcasts.send(id, sendOptions); + + if (response.error) { + throw new Error( + `Failed to send broadcast: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Broadcast ${scheduledAt ? 'scheduled' : 'sent'} successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_delete_broadcast", + "Delete a broadcast by ID in Resend", + { + id: z.string().describe("ID of the broadcast to delete"), + }, + { category: "RESEND_BROADCAST" }, + async ({ id }) => { + const resend = getResendClient(); + const response = await resend.broadcasts.remove(id); + + if (response.error) { + throw new Error( + `Failed to delete broadcast: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Broadcast deleted successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + server.tool( + "resend_list_broadcasts", + "List all broadcasts in Resend", + {}, + { category: "RESEND_BROADCAST", readOnlyHint: true }, + async () => { + const resend = getResendClient(); + const response = await resend.broadcasts.list(); + + if (response.error) { + throw new Error( + `Failed to list broadcasts: ${JSON.stringify(response.error)}` + ); + } + + return { + content: [ + { + type: "text", + text: `Broadcasts retrieved successfully! ${JSON.stringify(response.data)}`, + }, + ], + }; + } + ); + + return server; +} + +function extractApiKey(req: Request): string { + let authData = process.env.API_KEY; + + if (authData) { + return authData; + } + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: Resend API key is missing. Provide it via API_KEY env var or x-auth-data header with token field.'); + return ''; } -); + + const authDataJson = JSON.parse(authData); + return authDataJson.token ?? authDataJson.api_key ?? ''; +} const app = express(); + + +//============================================================================= +// STREAMABLE HTTP TRANSPORT (PROTOCOL VERSION 2025-03-26) +//============================================================================= + +app.post('/mcp', async (req: Request, res: Response) => { + const apiKey = extractApiKey(req); + + const server = getResendMcpServer(); + try { + const transport: StreamableHTTPServerTransport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + await server.connect(transport); + asyncLocalStorage.run({ apiKey }, async () => { + await transport.handleRequest(req, res, req.body); + }); + res.on('close', () => { + console.log('Request closed'); + transport.close(); + server.close(); + }); + } catch (error) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', + }, + id: null, + }); + } + } +}); + +app.get('/mcp', async (req: Request, res: Response) => { + console.log('Received GET MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +app.delete('/mcp', async (req: Request, res: Response) => { + console.log('Received DELETE MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +//============================================================================= +// DEPRECATED HTTP+SSE TRANSPORT (PROTOCOL VERSION 2024-11-05) +//============================================================================= const transports = new Map(); app.get("/sse", async (req, res) => { @@ -169,6 +801,7 @@ app.get("/sse", async (req, res) => { transports.set(transport.sessionId, transport); + const server = getResendMcpServer(); await server.connect(transport); console.log(`SSE connection established with transport: ${transport.sessionId}`); @@ -180,13 +813,10 @@ app.post("/messages", async (req, res) => { let transport: SSEServerTransport | undefined; transport = sessionId ? transports.get(sessionId) : undefined; if (transport) { - // Use environment variable for API key if available, otherwise use header - const apiKey = process.env.RESEND_API_KEY || req.headers['x-auth-token'] as string; - - const resendClient = new Resend(apiKey); + const apiKey = extractApiKey(req); - asyncLocalStorage.run({ resendClient }, async () => { - await transport.handlePostMessage(req, res); + asyncLocalStorage.run({ apiKey }, async () => { + await transport!.handlePostMessage(req, res); }); } else { console.error(`Transport not found for session ID: ${sessionId}`); diff --git a/mcp_servers/resend/package-lock.json b/mcp_servers/resend/package-lock.json index d463c701..af1b1444 100644 --- a/mcp_servers/resend/package-lock.json +++ b/mcp_servers/resend/package-lock.json @@ -1,17 +1,18 @@ { - "name": "resend", + "name": "@klavis-ai/mcp-server-resend", "version": "1.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "resend", + "name": "@klavis-ai/mcp-server-resend", "version": "1.0.0", - "license": "ISC", + "license": "MIT", "dependencies": { - "@modelcontextprotocol/sdk": "^1.5.0", - "@react-email/components": "^0.0.33", + "@modelcontextprotocol/sdk": "^1.12.1", + "@react-email/components": "^0.0.41", "@react-email/render": "^1.0.5", + "dotenv": "^16.3.1", "express": "^5.1.0", "resend": "^4.1.2", "zod": "^3.24.2" @@ -42,13 +43,19 @@ } }, "node_modules/@modelcontextprotocol/sdk": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.5.0.tgz", - "integrity": "sha512-IJ+5iVVs8FCumIHxWqpwgkwOzyhtHVKy45s6Ug7Dv0MfRpaYisH8QQ87rIWeWdOzlk8sfhitZ7HCyQZk7d6b8w==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.12.1.tgz", + "integrity": "sha512-KG1CZhZfWg+u8pxeM/mByJDScJSrjjxLc8fwQqbsS8xCjBmQfMNEBTotYdNanKekepnfRI85GtgQlctLFpcYPw==", "license": "MIT", "dependencies": { + "ajv": "^6.12.6", "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", + "express": "^5.0.1", + "express-rate-limit": "^7.5.0", + "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" @@ -95,12 +102,12 @@ } }, "node_modules/@react-email/code-block": { - "version": "0.0.11", - "resolved": "https://registry.npmjs.org/@react-email/code-block/-/code-block-0.0.11.tgz", - "integrity": "sha512-4D43p+LIMjDzm66gTDrZch0Flkip5je91mAT7iGs6+SbPyalHgIA+lFQoQwhz/VzHHLxuD0LV6gwmU/WUQ2WEg==", + "version": "0.0.13", + "resolved": "https://registry.npmjs.org/@react-email/code-block/-/code-block-0.0.13.tgz", + "integrity": "sha512-4DE4yPSgKEOnZMzcrDvRuD6mxsNxOex0hCYEG9F9q23geYgb2WCCeGBvIUXVzK69l703Dg4Vzrd5qUjl+JfcwA==", "license": "MIT", "dependencies": { - "prismjs": "1.29.0" + "prismjs": "^1.30.0" }, "engines": { "node": ">=18.0.0" @@ -134,14 +141,14 @@ } }, "node_modules/@react-email/components": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/@react-email/components/-/components-0.0.33.tgz", - "integrity": "sha512-/GKdT3YijT1iEWPAXF644jr12w5xVgzUr0zlbZGt2KOkGeFHNZUCL5UtRopmnjrH/Fayf8Gjv6q/4E2cZgDtdQ==", + "version": "0.0.41", + "resolved": "https://registry.npmjs.org/@react-email/components/-/components-0.0.41.tgz", + "integrity": "sha512-WUI3wHwra3QS0pwrovSU6b0I0f3TvY33ph0y44LuhSYDSQlMRyeOzgoT6HRDY5FXMDF57cHYq9WoKwpwP0yd7Q==", "license": "MIT", "dependencies": { "@react-email/body": "0.0.11", "@react-email/button": "0.0.19", - "@react-email/code-block": "0.0.11", + "@react-email/code-block": "0.0.13", "@react-email/code-inline": "0.0.5", "@react-email/column": "0.0.13", "@react-email/container": "0.0.15", @@ -152,13 +159,13 @@ "@react-email/html": "0.0.11", "@react-email/img": "0.0.11", "@react-email/link": "0.0.12", - "@react-email/markdown": "0.0.14", - "@react-email/preview": "0.0.12", - "@react-email/render": "1.0.5", + "@react-email/markdown": "0.0.15", + "@react-email/preview": "0.0.13", + "@react-email/render": "1.1.2", "@react-email/row": "0.0.12", "@react-email/section": "0.0.16", - "@react-email/tailwind": "1.0.4", - "@react-email/text": "0.0.11" + "@react-email/tailwind": "1.0.5", + "@react-email/text": "0.1.4" }, "engines": { "node": ">=18.0.0" @@ -261,12 +268,12 @@ } }, "node_modules/@react-email/markdown": { - "version": "0.0.14", - "resolved": "https://registry.npmjs.org/@react-email/markdown/-/markdown-0.0.14.tgz", - "integrity": "sha512-5IsobCyPkb4XwnQO8uFfGcNOxnsg3311GRXhJ3uKv51P7Jxme4ycC/MITnwIZ10w2zx7HIyTiqVzTj4XbuIHbg==", + "version": "0.0.15", + "resolved": "https://registry.npmjs.org/@react-email/markdown/-/markdown-0.0.15.tgz", + "integrity": "sha512-UQA9pVm5sbflgtg3EX3FquUP4aMBzmLReLbGJ6DZQZnAskBF36aI56cRykDq1o+1jT+CKIK1CducPYziaXliag==", "license": "MIT", "dependencies": { - "md-to-react-email": "5.0.5" + "md-to-react-email": "^5.0.5" }, "engines": { "node": ">=18.0.0" @@ -276,9 +283,9 @@ } }, "node_modules/@react-email/preview": { - "version": "0.0.12", - "resolved": "https://registry.npmjs.org/@react-email/preview/-/preview-0.0.12.tgz", - "integrity": "sha512-g/H5fa9PQPDK6WUEG7iTlC19sAktI23qyoiJtMLqQiXFCfWeQMhqjLGKeLSKkfzszqmfJCjZtpSiKtBoOdxp3Q==", + "version": "0.0.13", + "resolved": "https://registry.npmjs.org/@react-email/preview/-/preview-0.0.13.tgz", + "integrity": "sha512-F7j9FJ0JN/A4d7yr+aw28p4uX7VLWs7hTHtLo7WRyw4G+Lit6Zucq4UWKRxJC8lpsUdzVmG7aBJnKOT+urqs/w==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -288,14 +295,14 @@ } }, "node_modules/@react-email/render": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@react-email/render/-/render-1.0.5.tgz", - "integrity": "sha512-CA69HYXPk21HhtAXATIr+9JJwpDNmAFCvdMUjWmeoD1+KhJ9NAxusMRxKNeibdZdslmq3edaeOKGbdQ9qjK8LQ==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@react-email/render/-/render-1.1.2.tgz", + "integrity": "sha512-RnRehYN3v9gVlNMehHPHhyp2RQo7+pSkHDtXPvg3s0GbzM9SQMW4Qrf8GRNvtpLC4gsI+Wt0VatNRUFqjvevbw==", "license": "MIT", "dependencies": { - "html-to-text": "9.0.5", - "prettier": "3.4.2", - "react-promise-suspense": "0.3.4" + "html-to-text": "^9.0.5", + "prettier": "^3.5.3", + "react-promise-suspense": "^0.3.4" }, "engines": { "node": ">=18.0.0" @@ -330,9 +337,9 @@ } }, "node_modules/@react-email/tailwind": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@react-email/tailwind/-/tailwind-1.0.4.tgz", - "integrity": "sha512-tJdcusncdqgvTUYZIuhNC6LYTfL9vNTSQpwWdTCQhQ1lsrNCEE4OKCSdzSV3S9F32pi0i0xQ+YPJHKIzGjdTSA==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@react-email/tailwind/-/tailwind-1.0.5.tgz", + "integrity": "sha512-BH00cZSeFfP9HiDASl+sPHi7Hh77W5nzDgdnxtsVr/m3uQD9g180UwxcE3PhOfx0vRdLzQUU8PtmvvDfbztKQg==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -342,9 +349,9 @@ } }, "node_modules/@react-email/text": { - "version": "0.0.11", - "resolved": "https://registry.npmjs.org/@react-email/text/-/text-0.0.11.tgz", - "integrity": "sha512-a7nl/2KLpRHOYx75YbYZpWspUbX1DFY7JIZbOv5x0QU8SvwDbJt+Hm01vG34PffFyYvHEXrc6Qnip2RTjljNjg==", + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/@react-email/text/-/text-0.1.4.tgz", + "integrity": "sha512-cMNE02y8172DocpNGh97uV5HSTawaS4CKG/zOku8Pu+m6ehBKbAjgtQZDIxhgstw8+TWraFB8ltS1DPjfG8nLA==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -523,6 +530,28 @@ "node": ">= 0.6" } }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv/node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, "node_modules/ansi-regex": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", @@ -574,9 +603,9 @@ } }, "node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" @@ -696,6 +725,19 @@ "node": ">=6.6.0" } }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, "node_modules/cross-spawn": { "version": "7.0.6", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", @@ -807,6 +849,18 @@ "url": "https://github.com/fb55/domutils?sponsor=1" } }, + "node_modules/dotenv": { + "version": "16.5.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.5.0.tgz", + "integrity": "sha512-m/C+AwOAr9/W1UOIZUo232ejMNnJAJtYQjUbHoNTBNTJSvqzzDh7vnrei3o3r3m9blf6ZoDkvcw0VmozNRFJxg==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -986,12 +1040,33 @@ "url": "https://opencollective.com/express" } }, + "node_modules/express-rate-limit": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.0.tgz", + "integrity": "sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg==", + "license": "MIT", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": "^4.11 || 5 || ^5.0.0-beta.1" + } + }, "node_modules/fast-deep-equal": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", "integrity": "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w==", "license": "MIT" }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, "node_modules/finalhandler": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", @@ -1310,6 +1385,12 @@ "node": ">=14" } }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, "node_modules/leac": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/leac/-/leac-0.6.0.tgz", @@ -1454,6 +1535,15 @@ "node": "^18.17.0 || >=20.5.0" } }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/object-inspect": { "version": "1.13.4", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", @@ -1558,10 +1648,19 @@ "url": "https://ko-fi.com/killymxi" } }, + "node_modules/pkce-challenge": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", + "license": "MIT", + "engines": { + "node": ">=16.20.0" + } + }, "node_modules/prettier": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.4.2.tgz", - "integrity": "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ==", + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", + "integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", "license": "MIT", "bin": { "prettier": "bin/prettier.cjs" @@ -1574,9 +1673,9 @@ } }, "node_modules/prismjs": { - "version": "1.29.0", - "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.29.0.tgz", - "integrity": "sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==", + "version": "1.30.0", + "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.30.0.tgz", + "integrity": "sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw==", "license": "MIT", "engines": { "node": ">=6" @@ -1601,6 +1700,15 @@ "node": ">= 0.10" } }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/qs": { "version": "6.14.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", @@ -2081,6 +2189,15 @@ "node": ">= 0.8" } }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", diff --git a/mcp_servers/resend/package.json b/mcp_servers/resend/package.json index aa087837..56da6140 100644 --- a/mcp_servers/resend/package.json +++ b/mcp_servers/resend/package.json @@ -1,14 +1,14 @@ { - "name": "resend", + "name": "@klavis-ai/mcp-server-resend", "version": "1.0.0", "main": "index.js", "keywords": [], - "author": "", - "license": "ISC", - "description": "", + "author": "Klavis AI", + "license": "MIT", + "description": "Klavis MCP server for Resend", "dependencies": { - "@modelcontextprotocol/sdk": "^1.5.0", - "@react-email/components": "^0.0.33", + "@modelcontextprotocol/sdk": "^1.12.1", + "@react-email/components": "^0.0.41", "@react-email/render": "^1.0.5", "dotenv": "^16.3.1", "resend": "^4.1.2", @@ -24,6 +24,7 @@ }, "type": "module", "scripts": { - "build": "tsc && node -e \"require('fs').chmodSync('build/index.js', '755')\"" + "build": "tsc && node -e \"require('fs').chmodSync('build/index.js', '755')\"", + "start": "node build/index.js" } } diff --git a/mcp_servers/salesforce/.env.example b/mcp_servers/salesforce/.env.example new file mode 100644 index 00000000..6b9811f4 --- /dev/null +++ b/mcp_servers/salesforce/.env.example @@ -0,0 +1,2 @@ +# Port for the MCP server to listen on +SALESFORCE_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/salesforce/Dockerfile b/mcp_servers/salesforce/Dockerfile new file mode 100644 index 00000000..65d064ea --- /dev/null +++ b/mcp_servers/salesforce/Dockerfile @@ -0,0 +1,22 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/salesforce/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the server code +COPY mcp_servers/salesforce/server.py . +COPY mcp_servers/salesforce/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5001 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/salesforce/README.md b/mcp_servers/salesforce/README.md new file mode 100644 index 00000000..feba2e3d --- /dev/null +++ b/mcp_servers/salesforce/README.md @@ -0,0 +1,78 @@ +# Salesforce MCP Server + +A Model Context Protocol (MCP) server for Salesforce CRM integration. Manage leads, contacts, opportunities, and other Salesforce objects using the Salesforce API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Salesforce with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("SALESFORCE", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/salesforce-mcp-server:latest + + +# Run Salesforce MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/salesforce-mcp-server:latest + +# Run Salesforce MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_salesforce_access_token_here"}' \ + ghcr.io/klavis-ai/salesforce-mcp-server:latest +``` + +**OAuth Setup:** Salesforce requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Lead Management**: Create, read, update leads and lead conversion +- **Contact Operations**: Manage contacts and customer relationships +- **Opportunity Tracking**: Handle sales opportunities and pipeline +- **Account Management**: Manage customer accounts and hierarchies +- **Custom Objects**: Work with custom Salesforce objects and fields +- **Reports & Analytics**: Access Salesforce reports and dashboards + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/salesforce/requirements.txt b/mcp_servers/salesforce/requirements.txt new file mode 100644 index 00000000..3f87b6b1 --- /dev/null +++ b/mcp_servers/salesforce/requirements.txt @@ -0,0 +1,7 @@ +mcp==1.11.0 +starlette>=0.40.0 +uvicorn>=0.32.1 +click>=8.1.7 +python-dotenv>=1.0.1 +simple-salesforce>=1.12.6 +requests>=2.32.3 \ No newline at end of file diff --git a/mcp_servers/salesforce/server.py b/mcp_servers/salesforce/server.py new file mode 100644 index 00000000..d90589e2 --- /dev/null +++ b/mcp_servers/salesforce/server.py @@ -0,0 +1,670 @@ +import contextlib +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar +import base64 + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + access_token_context, instance_url_context, + # Accounts + get_accounts, create_account, update_account, delete_account, + # Contacts + get_contacts, create_contact, update_contact, delete_contact, + # Opportunities + get_opportunities, create_opportunity, update_opportunity, delete_opportunity, + # Leads + get_leads, create_lead, update_lead, delete_lead, convert_lead, + # Cases + get_cases, create_case, update_case, delete_case, + # Campaigns + get_campaigns, create_campaign, update_campaign, delete_campaign, + # Metadata & Queries + describe_object, execute_soql_query +) + +# Configure logging +logger = logging.getLogger(__name__) +load_dotenv() +SALESFORCE_MCP_SERVER_PORT = int(os.getenv("SALESFORCE_MCP_SERVER_PORT", "5000")) + +def extract_auth_credentials(request_or_scope) -> tuple[str, str]: + """Extract access token and instance URL from request headers. + + Returns: + tuple: (access_token, instance_url) + """ + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Get headers based on input type + if hasattr(request_or_scope, 'headers'): + # SSE request object + header_value = request_or_scope.headers.get(b'x-auth-data') + if header_value: + auth_data = base64.b64decode(header_value).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + header_value = headers.get(b'x-auth-data') + if header_value: + auth_data = base64.b64decode(header_value).decode('utf-8') + + if not auth_data: + return "", "" + + try: + auth_json = json.loads(auth_data) + return auth_json.get('access_token', ''), auth_json.get('instance_url', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "", "" + +@click.command() +@click.option("--port", default=SALESFORCE_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option("--log-level", default="INFO", help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)") +@click.option("--json-response", is_flag=True, default=False, help="Enable JSON responses for StreamableHTTP instead of SSE streams") +def main(port: int, log_level: str, json_response: bool) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("salesforce-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # Account Tools + types.Tool( + name="salesforce_get_accounts", + description="Get accounts with flexible filtering options including name search, industry, and type.", + inputSchema={ + "type": "object", + "properties": { + "limit": {"type": "integer", "description": "Maximum number of accounts to return (default: 50)", "default": 50}, + "fields": {"type": "array", "items": {"type": "string"}, "description": "Specific fields to retrieve"}, + "name_contains": {"type": "string", "description": "Filter accounts by name containing this text (case-insensitive)"}, + "industry": {"type": "string", "description": "Filter accounts by industry"}, + "account_type": {"type": "string", "description": "Filter accounts by type"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_ACCOUNT", "readOnlyHint": True}) + ), + types.Tool( + name="salesforce_create_account", + description="Create a new account in Salesforce.", + inputSchema={ + "type": "object", + "required": ["account_data"], + "properties": { + "account_data": {"type": "object", "description": "Account data including Name (required) and other fields"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_ACCOUNT"}) + ), + types.Tool( + name="salesforce_update_account", + description="Update an existing account.", + inputSchema={ + "type": "object", + "required": ["account_id", "account_data"], + "properties": { + "account_id": {"type": "string", "description": "The ID of the account to update"}, + "account_data": {"type": "object", "description": "Updated account data"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_ACCOUNT"}) + ), + types.Tool( + name="salesforce_delete_account", + description="Delete an account.", + inputSchema={ + "type": "object", + "required": ["account_id"], + "properties": { + "account_id": {"type": "string", "description": "The ID of the account to delete"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_ACCOUNT"}) + ), + + # Contact Tools + types.Tool( + name="salesforce_get_contacts", + description="Get contacts with flexible filtering options including name, email, and title search.", + inputSchema={ + "type": "object", + "properties": { + "account_id": {"type": "string", "description": "Filter contacts by account ID"}, + "limit": {"type": "integer", "description": "Maximum number of contacts to return (default: 50)", "default": 50}, + "fields": {"type": "array", "items": {"type": "string"}, "description": "Specific fields to retrieve"}, + "name_contains": {"type": "string", "description": "Filter contacts by first or last name containing this text (case-insensitive)"}, + "email_contains": {"type": "string", "description": "Filter contacts by email containing this text (case-insensitive)"}, + "title_contains": {"type": "string", "description": "Filter contacts by title containing this text (case-insensitive)"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_CONTACT", "readOnlyHint": True}) + ), + types.Tool( + name="salesforce_create_contact", + description="Create a new contact in Salesforce.", + inputSchema={ + "type": "object", + "required": ["contact_data"], + "properties": { + "contact_data": {"type": "object", "description": "Contact data including LastName (required) and other fields"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_CONTACT"}) + ), + types.Tool( + name="salesforce_update_contact", + description="Update an existing contact.", + inputSchema={ + "type": "object", + "required": ["contact_id", "contact_data"], + "properties": { + "contact_id": {"type": "string", "description": "The ID of the contact to update"}, + "contact_data": {"type": "object", "description": "Updated contact data"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_CONTACT"}) + ), + types.Tool( + name="salesforce_delete_contact", + description="Delete a contact.", + inputSchema={ + "type": "object", + "required": ["contact_id"], + "properties": { + "contact_id": {"type": "string", "description": "The ID of the contact to delete"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_CONTACT"}) + ), + + # Opportunity Tools + types.Tool( + name="salesforce_get_opportunities", + description="Get opportunities, optionally filtered by account, stage, name, or account name.", + inputSchema={ + "type": "object", + "properties": { + "account_id": {"type": "string", "description": "Filter opportunities by account ID"}, + "stage": {"type": "string", "description": "Filter opportunities by stage"}, + "name_contains": {"type": "string", "description": "Filter opportunities by name containing this text"}, + "account_name_contains": {"type": "string", "description": "Filter opportunities by account name containing this text"}, + "limit": {"type": "integer", "description": "Maximum number of opportunities to return (default: 50)", "default": 50}, + "fields": {"type": "array", "items": {"type": "string"}, "description": "Specific fields to retrieve"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_OPPORTUNITY", "readOnlyHint": True}) + ), + types.Tool( + name="salesforce_create_opportunity", + description="Create a new opportunity in Salesforce.", + inputSchema={ + "type": "object", + "required": ["opportunity_data"], + "properties": { + "opportunity_data": {"type": "object", "description": "Opportunity data including Name, StageName, and CloseDate (required)"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_OPPORTUNITY"}) + ), + types.Tool( + name="salesforce_update_opportunity", + description="Update an existing opportunity.", + inputSchema={ + "type": "object", + "required": ["opportunity_id"], + "properties": { + "opportunity_id": {"type": "string", "description": "The ID of the opportunity to update"}, + "closed_date": {"type": "string", "description": "The date the opportunity was closed"}, + "stage": {"type": "string", "description": "The stage the opportunity is in"}, + "amount": {"type": "number", "description": "The amount of the opportunity"}, + "next_step": {"type": "string", "description": "The next step for the opportunity"}, + "description": {"type": "string", "description": "The description of the opportunity"}, + "owner_id": {"type": "string", "description": "The ID of the owner of the opportunity"}, + "account_id": {"type": "string", "description": "The ID of the account associated with the opportunity"}, + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_OPPORTUNITY"}) + ), + types.Tool( + name="salesforce_delete_opportunity", + description="Delete an opportunity.", + inputSchema={ + "type": "object", + "required": ["opportunity_id"], + "properties": { + "opportunity_id": {"type": "string", "description": "The ID of the opportunity to delete"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_OPPORTUNITY"}) + ), + + # Lead Tools + types.Tool( + name="salesforce_get_leads", + description="Get leads with flexible filtering options including name, company, email, and industry search.", + inputSchema={ + "type": "object", + "properties": { + "status": {"type": "string", "description": "Filter leads by status"}, + "limit": {"type": "integer", "description": "Maximum number of leads to return (default: 50)", "default": 50}, + "fields": {"type": "array", "items": {"type": "string"}, "description": "Specific fields to retrieve"}, + "name_contains": {"type": "string", "description": "Filter leads by first or last name containing this text (case-insensitive)"}, + "company_contains": {"type": "string", "description": "Filter leads by company name containing this text (case-insensitive)"}, + "email_contains": {"type": "string", "description": "Filter leads by email containing this text (case-insensitive)"}, + "industry": {"type": "string", "description": "Filter leads by industry"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_LEAD", "readOnlyHint": True}) + ), + types.Tool( + name="salesforce_create_lead", + description="Create a new lead in Salesforce.", + inputSchema={ + "type": "object", + "required": ["lead_data"], + "properties": { + "lead_data": {"type": "object", "description": "Lead data including LastName and Company (required)"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_LEAD"}) + ), + types.Tool( + name="salesforce_update_lead", + description="Update an existing lead.", + inputSchema={ + "type": "object", + "required": ["lead_id", "lead_data"], + "properties": { + "lead_id": {"type": "string", "description": "The ID of the lead to update"}, + "lead_data": {"type": "object", "description": "Updated lead data"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_LEAD"}) + ), + types.Tool( + name="salesforce_delete_lead", + description="Delete a lead.", + inputSchema={ + "type": "object", + "required": ["lead_id"], + "properties": { + "lead_id": {"type": "string", "description": "The ID of the lead to delete"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_LEAD"}) + ), + types.Tool( + name="salesforce_convert_lead", + description="Convert a lead to account, contact, and optionally opportunity.", + inputSchema={ + "type": "object", + "required": ["lead_id"], + "properties": { + "lead_id": {"type": "string", "description": "The ID of the lead to convert"}, + "conversion_data": {"type": "object", "description": "Optional conversion settings"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_LEAD"}) + ), + + # Case Tools + types.Tool( + name="salesforce_get_cases", + description="Get cases with flexible filtering options including subject search, account, status, priority, and type.", + inputSchema={ + "type": "object", + "properties": { + "account_id": {"type": "string", "description": "Filter cases by account ID"}, + "status": {"type": "string", "description": "Filter cases by status"}, + "priority": {"type": "string", "description": "Filter cases by priority"}, + "limit": {"type": "integer", "description": "Maximum number of cases to return (default: 50)", "default": 50}, + "fields": {"type": "array", "items": {"type": "string"}, "description": "Specific fields to retrieve"}, + "subject_contains": {"type": "string", "description": "Filter cases by subject containing this text (case-insensitive)"}, + "case_type": {"type": "string", "description": "Filter cases by type"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_CASE", "readOnlyHint": True}) + ), + types.Tool( + name="salesforce_create_case", + description="Create a new case in Salesforce.", + inputSchema={ + "type": "object", + "required": ["case_data"], + "properties": { + "case_data": {"type": "object", "description": "Case data including Subject (required)"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_CASE"}) + ), + types.Tool( + name="salesforce_update_case", + description="Update an existing case.", + inputSchema={ + "type": "object", + "required": ["case_id", "case_data"], + "properties": { + "case_id": {"type": "string", "description": "The ID of the case to update"}, + "case_data": {"type": "object", "description": "Updated case data"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_CASE"}) + ), + types.Tool( + name="salesforce_delete_case", + description="Delete a case.", + inputSchema={ + "type": "object", + "required": ["case_id"], + "properties": { + "case_id": {"type": "string", "description": "The ID of the case to delete"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_CASE"}) + ), + + # Campaign Tools + types.Tool( + name="salesforce_get_campaigns", + description="Get campaigns, optionally filtered by status or type.", + inputSchema={ + "type": "object", + "properties": { + "status": {"type": "string", "description": "Filter campaigns by status"}, + "type_filter": {"type": "string", "description": "Filter campaigns by type"}, + "limit": {"type": "integer", "description": "Maximum number of campaigns to return (default: 50)", "default": 50}, + "fields": {"type": "array", "items": {"type": "string"}, "description": "Specific fields to retrieve"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_CAMPAIGN", "readOnlyHint": True}) + ), + types.Tool( + name="salesforce_create_campaign", + description="Create a new campaign in Salesforce.", + inputSchema={ + "type": "object", + "required": ["campaign_data"], + "properties": { + "campaign_data": {"type": "object", "description": "Campaign data including Name (required)"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_CAMPAIGN"}) + ), + types.Tool( + name="salesforce_update_campaign", + description="Update an existing campaign.", + inputSchema={ + "type": "object", + "required": ["campaign_id", "campaign_data"], + "properties": { + "campaign_id": {"type": "string", "description": "The ID of the campaign to update"}, + "campaign_data": {"type": "object", "description": "Updated campaign data"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_CAMPAIGN"}) + ), + types.Tool( + name="salesforce_delete_campaign", + description="Delete a campaign.", + inputSchema={ + "type": "object", + "required": ["campaign_id"], + "properties": { + "campaign_id": {"type": "string", "description": "The ID of the campaign to delete"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_CAMPAIGN"}) + ), + + # Query and Metadata Tools + types.Tool( + name="salesforce_query", + description="Execute a SOQL query on Salesforce", + inputSchema={ + "type": "object", + "required": ["query"], + "properties": { + "query": {"type": "string", "description": "SOQL query to execute"} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_QUERY", "readOnlyHint": True}) + ), + types.Tool( + name="salesforce_describe_object", + description="Get detailed schema and field information for any Salesforce object.", + inputSchema={ + "type": "object", + "required": ["object_name"], + "properties": { + "object_name": {"type": "string", "description": "API name of the object to describe"}, + "detailed": {"type": "boolean", "description": "Whether to return additional metadata for custom objects", "default": False} + } + }, + annotations=types.ToolAnnotations(**{"category": "SALESFORCE_METADATA", "readOnlyHint": True}) + ), + ] + + @app.call_tool() + async def call_tool(name: str, arguments: dict) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + try: + # Account tools + if name == "salesforce_get_accounts": + result = await get_accounts( + limit=arguments.get("limit", 50), + fields=arguments.get("fields"), + name_contains=arguments.get("name_contains"), + industry=arguments.get("industry"), + account_type=arguments.get("account_type") + ) + elif name == "salesforce_create_account": + result = await create_account(arguments["account_data"]) + elif name == "salesforce_update_account": + result = await update_account(arguments["account_id"], arguments["account_data"]) + elif name == "salesforce_delete_account": + result = await delete_account(arguments["account_id"]) + + # Contact tools + elif name == "salesforce_get_contacts": + result = await get_contacts( + account_id=arguments.get("account_id"), + limit=arguments.get("limit", 50), + fields=arguments.get("fields"), + name_contains=arguments.get("name_contains"), + email_contains=arguments.get("email_contains"), + title_contains=arguments.get("title_contains") + ) + elif name == "salesforce_create_contact": + result = await create_contact(arguments["contact_data"]) + elif name == "salesforce_update_contact": + result = await update_contact(arguments["contact_id"], arguments["contact_data"]) + elif name == "salesforce_delete_contact": + result = await delete_contact(arguments["contact_id"]) + + # Opportunity tools + elif name == "salesforce_get_opportunities": + result = await get_opportunities( + arguments.get("account_id"), + arguments.get("stage"), + arguments.get("name_contains"), + arguments.get("account_name_contains"), + arguments.get("limit", 50), + arguments.get("fields") + ) + elif name == "salesforce_create_opportunity": + result = await create_opportunity(arguments["opportunity_data"]) + elif name == "salesforce_update_opportunity": + result = await update_opportunity( + opportunity_id=arguments["opportunity_id"], + closed_date=arguments.get("closed_date"), + stage=arguments.get("stage"), + amount=arguments.get("amount"), + next_step=arguments.get("next_step"), + description=arguments.get("description"), + owner_id=arguments.get("owner_id"), + account_id=arguments.get("account_id") + ) + elif name == "salesforce_delete_opportunity": + result = await delete_opportunity(arguments["opportunity_id"]) + + # Lead tools + elif name == "salesforce_get_leads": + result = await get_leads( + status=arguments.get("status"), + limit=arguments.get("limit", 50), + fields=arguments.get("fields"), + name_contains=arguments.get("name_contains"), + company_contains=arguments.get("company_contains"), + email_contains=arguments.get("email_contains"), + industry=arguments.get("industry") + ) + elif name == "salesforce_create_lead": + result = await create_lead(arguments["lead_data"]) + elif name == "salesforce_update_lead": + result = await update_lead(arguments["lead_id"], arguments["lead_data"]) + elif name == "salesforce_delete_lead": + result = await delete_lead(arguments["lead_id"]) + elif name == "salesforce_convert_lead": + result = await convert_lead(arguments["lead_id"], arguments.get("conversion_data")) + + # Case tools + elif name == "salesforce_get_cases": + result = await get_cases( + account_id=arguments.get("account_id"), + status=arguments.get("status"), + priority=arguments.get("priority"), + limit=arguments.get("limit", 50), + fields=arguments.get("fields"), + subject_contains=arguments.get("subject_contains"), + case_type=arguments.get("case_type") + ) + elif name == "salesforce_create_case": + result = await create_case(arguments["case_data"]) + elif name == "salesforce_update_case": + result = await update_case(arguments["case_id"], arguments["case_data"]) + elif name == "salesforce_delete_case": + result = await delete_case(arguments["case_id"]) + + # Campaign tools + elif name == "salesforce_get_campaigns": + result = await get_campaigns(arguments.get("status"), arguments.get("type_filter"), arguments.get("limit", 50), arguments.get("fields")) + elif name == "salesforce_create_campaign": + result = await create_campaign(arguments["campaign_data"]) + elif name == "salesforce_update_campaign": + result = await update_campaign(arguments["campaign_id"], arguments["campaign_data"]) + elif name == "salesforce_delete_campaign": + result = await delete_campaign(arguments["campaign_id"]) + + + + # Query and metadata tools + elif name == "salesforce_query": + result = await execute_soql_query(arguments["query"]) + elif name == "salesforce_describe_object": + result = await describe_object(arguments["object_name"], arguments.get("detailed", False)) + + else: + return [types.TextContent(type="text", text=f"Unknown tool: {name}")] + + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth credentials from headers + access_token, instance_url = extract_auth_credentials(request) + + # Set the access token and instance URL in context for this request + access_token_token = access_token_context.set(access_token or "") + instance_url_token = instance_url_context.set(instance_url or "") + try: + async with sse.connect_sse(request.scope, request.receive, request._send) as streams: + await app.run(streams[0], streams[1], app.create_initialization_options()) + finally: + access_token_context.reset(access_token_token) + instance_url_context.reset(instance_url_token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http(scope: Scope, receive: Receive, send: Send) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth credentials from headers + access_token, instance_url = extract_auth_credentials(scope) + + # Set the access token and instance URL in context for this request + access_token_token = access_token_context.set(access_token or "") + instance_url_token = instance_url_context.set(instance_url or "") + try: + await session_manager.handle_request(scope, receive, send) + finally: + access_token_context.reset(access_token_token) + instance_url_context.reset(instance_url_token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/salesforce/tools/__init__.py b/mcp_servers/salesforce/tools/__init__.py new file mode 100644 index 00000000..1ba91c6f --- /dev/null +++ b/mcp_servers/salesforce/tools/__init__.py @@ -0,0 +1,74 @@ +# Salesforce MCP Server Tools +# This package contains all the tool implementations organized by object type + +from .accounts import ( + get_accounts, create_account, update_account, delete_account +) +from .contacts import ( + get_contacts, create_contact, update_contact, delete_contact +) +from .opportunities import ( + get_opportunities, create_opportunity, update_opportunity, delete_opportunity +) +from .leads import ( + get_leads, create_lead, update_lead, delete_lead, convert_lead +) +from .cases import ( + get_cases, create_case, update_case, delete_case +) +from .campaigns import ( + get_campaigns, create_campaign, update_campaign, delete_campaign +) + +from .metadata import ( + describe_object, execute_soql_query +) +from .base import access_token_context, instance_url_context + +__all__ = [ + # Accounts + "get_accounts", + "create_account", + "update_account", + "delete_account", + + # Contacts + "get_contacts", + "create_contact", + "update_contact", + "delete_contact", + + # Opportunities + "get_opportunities", + "create_opportunity", + "update_opportunity", + "delete_opportunity", + + # Leads + "get_leads", + "create_lead", + "update_lead", + "delete_lead", + "convert_lead", + + # Cases + "get_cases", + "create_case", + "update_case", + "delete_case", + + # Campaigns + "get_campaigns", + "create_campaign", + "update_campaign", + "delete_campaign", + + + # Metadata & Queries + "describe_object", + "execute_soql_query", + + # Base + "access_token_context", + "instance_url_context", +] \ No newline at end of file diff --git a/mcp_servers/salesforce/tools/accounts.py b/mcp_servers/salesforce/tools/accounts.py new file mode 100644 index 00000000..cb65fab8 --- /dev/null +++ b/mcp_servers/salesforce/tools/accounts.py @@ -0,0 +1,140 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import get_salesforce_conn, handle_salesforce_error, format_success_response + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_accounts(limit: int = 50, fields: Optional[List[str]] = None, name_contains: Optional[str] = None, industry: Optional[str] = None, account_type: Optional[str] = None) -> Dict[str, Any]: + """Get accounts with flexible filtering options.""" + logger.info(f"Executing tool: get_accounts with limit: {limit}, name_contains: {name_contains}, industry: {industry}, account_type: {account_type}") + try: + sf = get_salesforce_conn() + + # Default fields if none specified + if not fields: + fields = ['Id', 'Name', 'Type', 'Industry', 'BillingStreet', 'BillingCity', + 'BillingState', 'BillingCountry', 'Phone', 'Website', 'OwnerId', + 'CreatedDate', 'LastModifiedDate'] + + field_list = ', '.join(fields) + + # Build query with optional filters + where_clauses = [] + if name_contains: + # Case-insensitive search by trying multiple case variations + name_variations = [ + name_contains.lower(), + name_contains.upper(), + name_contains.capitalize(), + name_contains + ] + name_like_conditions = " OR ".join([f"Name LIKE '%{variation}%'" for variation in set(name_variations)]) + where_clauses.append(f"({name_like_conditions})") + + if industry: + where_clauses.append(f"Industry = '{industry}'") + if account_type: + where_clauses.append(f"Type = '{account_type}'") + + where_clause = " WHERE " + " AND ".join(where_clauses) if where_clauses else "" + query = f"SELECT {field_list} FROM Account{where_clause} ORDER BY Name LIMIT {limit}" + + result = sf.query(query) + return dict(result) + + except Exception as e: + logger.exception(f"Error executing tool get_accounts: {e}") + raise e + +async def get_account_by_id(account_id: str, fields: Optional[List[str]] = None) -> Dict[str, Any]: + """Get a specific account by ID.""" + logger.info(f"Executing tool: get_account_by_id with account_id: {account_id}") + try: + sf = get_salesforce_conn() + + # Default fields if none specified + if not fields: + fields = ['Id', 'Name', 'Type', 'Industry', 'Description', 'BillingStreet', + 'BillingCity', 'BillingState', 'BillingCountry', 'BillingPostalCode', + 'ShippingStreet', 'ShippingCity', 'ShippingState', 'ShippingCountry', + 'ShippingPostalCode', 'Phone', 'Fax', 'Website', 'NumberOfEmployees', + 'AnnualRevenue', 'OwnerId', 'CreatedDate', 'LastModifiedDate'] + + field_list = ', '.join(fields) + query = f"SELECT {field_list} FROM Account WHERE Id = '{account_id}'" + + result = sf.query(query) + return dict(result) + + except Exception as e: + logger.exception(f"Error executing tool get_account_by_id: {e}") + raise e + +async def create_account(account_data: Dict[str, Any]) -> Dict[str, Any]: + """Create a new account.""" + logger.info(f"Executing tool: create_account") + try: + sf = get_salesforce_conn() + + # Validate required fields + if 'Name' not in account_data: + return { + "success": False, + "error": "Name is required for Account creation", + "message": "Failed to create Account" + } + + result = sf.Account.create(account_data) + + if result.get('success'): + return format_success_response(result.get('id'), "created", "Account", account_data) + else: + return { + "success": False, + "errors": result.get('errors', []), + "message": "Failed to create Account" + } + + except Exception as e: + return handle_salesforce_error(e, "create", "Account") + +async def update_account(account_id: str, account_data: Dict[str, Any]) -> Dict[str, Any]: + """Update an existing account.""" + logger.info(f"Executing tool: update_account with account_id: {account_id}") + try: + sf = get_salesforce_conn() + + result = sf.Account.update(account_id, account_data) + + # simple-salesforce returns HTTP status code for updates + if result == 204: # HTTP 204 No Content indicates successful update + return format_success_response(account_id, "updated", "Account", account_data) + else: + return { + "success": False, + "message": f"Failed to update Account. Status code: {result}" + } + + except Exception as e: + return handle_salesforce_error(e, "update", "Account") + +async def delete_account(account_id: str) -> Dict[str, Any]: + """Delete an account.""" + logger.info(f"Executing tool: delete_account with account_id: {account_id}") + try: + sf = get_salesforce_conn() + + result = sf.Account.delete(account_id) + + # simple-salesforce returns HTTP status code for deletes + if result == 204: # HTTP 204 No Content indicates successful deletion + return format_success_response(account_id, "deleted", "Account") + else: + return { + "success": False, + "message": f"Failed to delete Account. Status code: {result}" + } + + except Exception as e: + return handle_salesforce_error(e, "delete", "Account") \ No newline at end of file diff --git a/mcp_servers/salesforce/tools/base.py b/mcp_servers/salesforce/tools/base.py new file mode 100644 index 00000000..a196fd00 --- /dev/null +++ b/mcp_servers/salesforce/tools/base.py @@ -0,0 +1,86 @@ +import logging +from typing import Any, Dict +from contextvars import ContextVar +from simple_salesforce import Salesforce +from simple_salesforce.exceptions import SalesforceError + +# Configure logging +logger = logging.getLogger(__name__) + +# Context variables to store the access token and instance URL for each request +access_token_context: ContextVar[str] = ContextVar('access_token') +instance_url_context: ContextVar[str] = ContextVar('instance_url') + +def get_salesforce_connection(access_token: str, instance_url: str) -> Salesforce: + """Create Salesforce connection with access token.""" + return Salesforce(instance_url=instance_url, session_id=access_token) + +def get_salesforce_conn() -> Salesforce: + """Get the Salesforce connection from context - created fresh each time.""" + try: + access_token = access_token_context.get() + instance_url = instance_url_context.get() + + if not access_token or not instance_url: + raise RuntimeError("Salesforce access token and instance URL are required. Provide them via x-auth-token and x-instance-url headers.") + + return get_salesforce_connection(access_token, instance_url) + except LookupError: + raise RuntimeError("Salesforce credentials not found in request context") + +def handle_salesforce_error(e: Exception, operation: str, object_type: str = "") -> Dict[str, Any]: + """Handle Salesforce errors and return standardized error response.""" + if isinstance(e, SalesforceError): + logger.error(f"Salesforce API error during {operation}: {e}") + error_msg = str(e) + # Try to extract more meaningful error information + if hasattr(e, 'content') and e.content: + try: + error_content = e.content[0]['message'] if isinstance(e.content, list) else e.content + if isinstance(error_content, dict) and 'message' in error_content: + error_msg = error_content['message'] + except: + pass + return { + "success": False, + "error": f"Salesforce API Error: {error_msg}", + "message": f"Failed to {operation} {object_type}".strip() + } + else: + logger.exception(f"Error during {operation}: {e}") + return { + "success": False, + "error": str(e), + "message": f"Failed to {operation} {object_type}".strip() + } + +def format_success_response(record_id: str, operation: str, object_type: str, data: Dict[str, Any] = None) -> Dict[str, Any]: + """Format a successful operation response.""" + response = { + "success": True, + "id": record_id, + "message": f"{object_type} {operation} successfully", + "object_type": object_type + } + if data: + response["data"] = data + return response + +def create_case_insensitive_like_conditions(search_term: str, *field_names: str) -> str: + """Create case-insensitive LIKE conditions for multiple fields.""" + if not search_term or not field_names: + return "" + + variations = [ + search_term.lower(), + search_term.upper(), + search_term.capitalize(), + search_term + ] + + all_conditions = [] + for field_name in field_names: + field_conditions = [f"{field_name} LIKE '%{variation}%'" for variation in set(variations)] + all_conditions.extend(field_conditions) + + return " OR ".join(all_conditions) \ No newline at end of file diff --git a/mcp_servers/salesforce/tools/campaigns.py b/mcp_servers/salesforce/tools/campaigns.py new file mode 100644 index 00000000..1f469ac4 --- /dev/null +++ b/mcp_servers/salesforce/tools/campaigns.py @@ -0,0 +1,132 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import get_salesforce_conn, handle_salesforce_error, format_success_response + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_campaigns(status: Optional[str] = None, type_filter: Optional[str] = None, limit: int = 50, fields: Optional[List[str]] = None) -> Dict[str, Any]: + """Get campaigns, optionally filtered by status or type.""" + logger.info(f"Executing tool: get_campaigns with status: {status}, type: {type_filter}, limit: {limit}") + try: + sf = get_salesforce_conn() + + # Default fields if none specified + if not fields: + fields = ['Id', 'Name', 'Type', 'Status', 'StartDate', 'EndDate', 'IsActive', + 'Description', 'BudgetedCost', 'ActualCost', 'ExpectedRevenue', + 'NumberOfLeads', 'NumberOfConvertedLeads', 'NumberOfContacts', + 'NumberOfOpportunities', 'NumberOfWonOpportunities', 'OwnerId', + 'CreatedDate', 'LastModifiedDate'] + + field_list = ', '.join(fields) + + # Build query with optional filters + where_clauses = [] + if status: + where_clauses.append(f"Status = '{status}'") + if type_filter: + where_clauses.append(f"Type = '{type_filter}'") + + where_clause = " WHERE " + " AND ".join(where_clauses) if where_clauses else "" + query = f"SELECT {field_list} FROM Campaign{where_clause} ORDER BY StartDate DESC LIMIT {limit}" + + result = sf.query(query) + return dict(result) + + except Exception as e: + logger.exception(f"Error executing tool get_campaigns: {e}") + raise e + +async def get_campaign_by_id(campaign_id: str, fields: Optional[List[str]] = None) -> Dict[str, Any]: + """Get a specific campaign by ID.""" + logger.info(f"Executing tool: get_campaign_by_id with campaign_id: {campaign_id}") + try: + sf = get_salesforce_conn() + + # Default fields if none specified + if not fields: + fields = ['Id', 'Name', 'Type', 'Status', 'StartDate', 'EndDate', 'IsActive', + 'Description', 'BudgetedCost', 'ActualCost', 'ExpectedRevenue', + 'ExpectedResponse', 'NumberSent', 'NumberOfLeads', 'NumberOfConvertedLeads', + 'NumberOfContacts', 'NumberOfResponses', 'NumberOfOpportunities', + 'NumberOfWonOpportunities', 'AmountAllOpportunities', 'AmountWonOpportunities', + 'OwnerId', 'CreatedDate', 'LastModifiedDate'] + + field_list = ', '.join(fields) + query = f"SELECT {field_list} FROM Campaign WHERE Id = '{campaign_id}'" + + result = sf.query(query) + return dict(result) + + except Exception as e: + logger.exception(f"Error executing tool get_campaign_by_id: {e}") + raise e + +async def create_campaign(campaign_data: Dict[str, Any]) -> Dict[str, Any]: + """Create a new campaign.""" + logger.info(f"Executing tool: create_campaign") + try: + sf = get_salesforce_conn() + + # Validate required fields + if 'Name' not in campaign_data: + return { + "success": False, + "error": "Name is required for Campaign creation", + "message": "Failed to create Campaign" + } + + result = sf.Campaign.create(campaign_data) + + if result.get('success'): + return format_success_response(result.get('id'), "created", "Campaign", campaign_data) + else: + return { + "success": False, + "errors": result.get('errors', []), + "message": "Failed to create Campaign" + } + + except Exception as e: + return handle_salesforce_error(e, "create", "Campaign") + +async def update_campaign(campaign_id: str, campaign_data: Dict[str, Any]) -> Dict[str, Any]: + """Update an existing campaign.""" + logger.info(f"Executing tool: update_campaign with campaign_id: {campaign_id}") + try: + sf = get_salesforce_conn() + + result = sf.Campaign.update(campaign_id, campaign_data) + + # simple-salesforce returns HTTP status code for updates + if result == 204: # HTTP 204 No Content indicates successful update + return format_success_response(campaign_id, "updated", "Campaign", campaign_data) + else: + return { + "success": False, + "message": f"Failed to update Campaign. Status code: {result}" + } + + except Exception as e: + return handle_salesforce_error(e, "update", "Campaign") + +async def delete_campaign(campaign_id: str) -> Dict[str, Any]: + """Delete a campaign.""" + logger.info(f"Executing tool: delete_campaign with campaign_id: {campaign_id}") + try: + sf = get_salesforce_conn() + + result = sf.Campaign.delete(campaign_id) + + # simple-salesforce returns HTTP status code for deletes + if result == 204: # HTTP 204 No Content indicates successful deletion + return format_success_response(campaign_id, "deleted", "Campaign") + else: + return { + "success": False, + "message": f"Failed to delete Campaign. Status code: {result}" + } + + except Exception as e: + return handle_salesforce_error(e, "delete", "Campaign") \ No newline at end of file diff --git a/mcp_servers/salesforce/tools/cases.py b/mcp_servers/salesforce/tools/cases.py new file mode 100644 index 00000000..67dd1502 --- /dev/null +++ b/mcp_servers/salesforce/tools/cases.py @@ -0,0 +1,143 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import get_salesforce_conn, handle_salesforce_error, format_success_response + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_cases(account_id: Optional[str] = None, status: Optional[str] = None, priority: Optional[str] = None, limit: int = 50, fields: Optional[List[str]] = None, subject_contains: Optional[str] = None, case_type: Optional[str] = None) -> Dict[str, Any]: + """Get cases with flexible filtering options.""" + logger.info(f"Executing tool: get_cases with account_id: {account_id}, status: {status}, priority: {priority}, limit: {limit}, subject_contains: {subject_contains}, case_type: {case_type}") + try: + sf = get_salesforce_conn() + + # Default fields if none specified + if not fields: + fields = ['Id', 'CaseNumber', 'Subject', 'Status', 'Priority', 'Type', 'Reason', + 'AccountId', 'Account.Name', 'ContactId', 'Contact.Name', 'OwnerId', + 'CreatedDate', 'LastModifiedDate', 'ClosedDate'] + + field_list = ', '.join(fields) + + # Build query with optional filters + where_clauses = [] + if account_id: + where_clauses.append(f"AccountId = '{account_id}'") + if status: + where_clauses.append(f"Status = '{status}'") + if priority: + where_clauses.append(f"Priority = '{priority}'") + if subject_contains: + # Case-insensitive subject search + subject_variations = [ + subject_contains.lower(), + subject_contains.upper(), + subject_contains.capitalize(), + subject_contains + ] + subject_like_conditions = " OR ".join([f"Subject LIKE '%{variation}%'" for variation in set(subject_variations)]) + where_clauses.append(f"({subject_like_conditions})") + if case_type: + where_clauses.append(f"Type = '{case_type}'") + + where_clause = " WHERE " + " AND ".join(where_clauses) if where_clauses else "" + query = f"SELECT {field_list} FROM Case{where_clause} ORDER BY CreatedDate DESC LIMIT {limit}" + + result = sf.query(query) + return dict(result) + + except Exception as e: + logger.exception(f"Error executing tool get_cases: {e}") + raise e + +async def get_case_by_id(case_id: str, fields: Optional[List[str]] = None) -> Dict[str, Any]: + """Get a specific case by ID.""" + logger.info(f"Executing tool: get_case_by_id with case_id: {case_id}") + try: + sf = get_salesforce_conn() + + # Default fields if none specified + if not fields: + fields = ['Id', 'CaseNumber', 'Subject', 'Description', 'Status', 'Priority', + 'Type', 'Reason', 'Origin', 'AccountId', 'Account.Name', 'ContactId', + 'Contact.Name', 'Contact.Email', 'Contact.Phone', 'SuppliedName', + 'SuppliedEmail', 'SuppliedPhone', 'SuppliedCompany', 'OwnerId', + 'CreatedDate', 'LastModifiedDate', 'ClosedDate'] + + field_list = ', '.join(fields) + query = f"SELECT {field_list} FROM Case WHERE Id = '{case_id}'" + + result = sf.query(query) + return dict(result) + + except Exception as e: + logger.exception(f"Error executing tool get_case_by_id: {e}") + raise e + +async def create_case(case_data: Dict[str, Any]) -> Dict[str, Any]: + """Create a new case.""" + logger.info(f"Executing tool: create_case") + try: + sf = get_salesforce_conn() + + # Validate required fields (Subject is typically required) + if 'Subject' not in case_data: + return { + "success": False, + "error": "Subject is required for Case creation", + "message": "Failed to create Case" + } + + result = sf.Case.create(case_data) + + if result.get('success'): + return format_success_response(result.get('id'), "created", "Case", case_data) + else: + return { + "success": False, + "errors": result.get('errors', []), + "message": "Failed to create Case" + } + + except Exception as e: + return handle_salesforce_error(e, "create", "Case") + +async def update_case(case_id: str, case_data: Dict[str, Any]) -> Dict[str, Any]: + """Update an existing case.""" + logger.info(f"Executing tool: update_case with case_id: {case_id}") + try: + sf = get_salesforce_conn() + + result = sf.Case.update(case_id, case_data) + + # simple-salesforce returns HTTP status code for updates + if result == 204: # HTTP 204 No Content indicates successful update + return format_success_response(case_id, "updated", "Case", case_data) + else: + return { + "success": False, + "message": f"Failed to update Case. Status code: {result}" + } + + except Exception as e: + return handle_salesforce_error(e, "update", "Case") + +async def delete_case(case_id: str) -> Dict[str, Any]: + """Delete a case.""" + logger.info(f"Executing tool: delete_case with case_id: {case_id}") + try: + sf = get_salesforce_conn() + + result = sf.Case.delete(case_id) + + # simple-salesforce returns HTTP status code for deletes + if result == 204: # HTTP 204 No Content indicates successful deletion + return format_success_response(case_id, "deleted", "Case") + else: + return { + "success": False, + "message": f"Failed to delete Case. Status code: {result}" + } + + except Exception as e: + return handle_salesforce_error(e, "delete", "Case") \ No newline at end of file diff --git a/mcp_servers/salesforce/tools/contacts.py b/mcp_servers/salesforce/tools/contacts.py new file mode 100644 index 00000000..d6c6a268 --- /dev/null +++ b/mcp_servers/salesforce/tools/contacts.py @@ -0,0 +1,159 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import get_salesforce_conn, handle_salesforce_error, format_success_response + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_contacts(account_id: Optional[str] = None, limit: int = 50, fields: Optional[List[str]] = None, name_contains: Optional[str] = None, email_contains: Optional[str] = None, title_contains: Optional[str] = None) -> Dict[str, Any]: + """Get contacts with flexible filtering options.""" + logger.info(f"Executing tool: get_contacts with account_id: {account_id}, limit: {limit}, name_contains: {name_contains}, email_contains: {email_contains}, title_contains: {title_contains}") + try: + sf = get_salesforce_conn() + + # Default fields if none specified + if not fields: + fields = ['Id', 'FirstName', 'LastName', 'Email', 'Phone', 'Title', 'Department', + 'AccountId', 'Account.Name', 'OwnerId', 'CreatedDate', 'LastModifiedDate'] + + field_list = ', '.join(fields) + + # Build query with optional filters + where_clauses = [] + if account_id: + where_clauses.append(f"AccountId = '{account_id}'") + if name_contains: + # Case-insensitive search for first or last name + name_variations = [ + name_contains.lower(), + name_contains.upper(), + name_contains.capitalize(), + name_contains + ] + name_like_conditions = [] + for variation in set(name_variations): + name_like_conditions.extend([ + f"FirstName LIKE '%{variation}%'", + f"LastName LIKE '%{variation}%'" + ]) + where_clauses.append(f"({' OR '.join(name_like_conditions)})") + if email_contains: + # Case-insensitive email search + email_variations = [ + email_contains.lower(), + email_contains.upper(), + email_contains + ] + email_like_conditions = " OR ".join([f"Email LIKE '%{variation}%'" for variation in set(email_variations)]) + where_clauses.append(f"({email_like_conditions})") + if title_contains: + # Case-insensitive title search + title_variations = [ + title_contains.lower(), + title_contains.upper(), + title_contains.capitalize(), + title_contains + ] + title_like_conditions = " OR ".join([f"Title LIKE '%{variation}%'" for variation in set(title_variations)]) + where_clauses.append(f"({title_like_conditions})") + + where_clause = " WHERE " + " AND ".join(where_clauses) if where_clauses else "" + query = f"SELECT {field_list} FROM Contact{where_clause} ORDER BY LastName, FirstName LIMIT {limit}" + + result = sf.query(query) + return dict(result) + + except Exception as e: + logger.exception(f"Error executing tool get_contacts: {e}") + raise e + +async def get_contact_by_id(contact_id: str, fields: Optional[List[str]] = None) -> Dict[str, Any]: + """Get a specific contact by ID.""" + logger.info(f"Executing tool: get_contact_by_id with contact_id: {contact_id}") + try: + sf = get_salesforce_conn() + + # Default fields if none specified + if not fields: + fields = ['Id', 'FirstName', 'LastName', 'Email', 'Phone', 'MobilePhone', + 'Title', 'Department', 'AccountId', 'Account.Name', 'MailingStreet', + 'MailingCity', 'MailingState', 'MailingCountry', 'MailingPostalCode', + 'Birthdate', 'LeadSource', 'OwnerId', 'CreatedDate', 'LastModifiedDate'] + + field_list = ', '.join(fields) + query = f"SELECT {field_list} FROM Contact WHERE Id = '{contact_id}'" + + result = sf.query(query) + return dict(result) + + except Exception as e: + logger.exception(f"Error executing tool get_contact_by_id: {e}") + raise e + +async def create_contact(contact_data: Dict[str, Any]) -> Dict[str, Any]: + """Create a new contact.""" + logger.info(f"Executing tool: create_contact") + try: + sf = get_salesforce_conn() + + # Validate required fields + if 'LastName' not in contact_data: + return { + "success": False, + "error": "LastName is required for Contact creation", + "message": "Failed to create Contact" + } + + result = sf.Contact.create(contact_data) + + if result.get('success'): + return format_success_response(result.get('id'), "created", "Contact", contact_data) + else: + return { + "success": False, + "errors": result.get('errors', []), + "message": "Failed to create Contact" + } + + except Exception as e: + return handle_salesforce_error(e, "create", "Contact") + +async def update_contact(contact_id: str, contact_data: Dict[str, Any]) -> Dict[str, Any]: + """Update an existing contact.""" + logger.info(f"Executing tool: update_contact with contact_id: {contact_id}") + try: + sf = get_salesforce_conn() + + result = sf.Contact.update(contact_id, contact_data) + + # simple-salesforce returns HTTP status code for updates + if result == 204: # HTTP 204 No Content indicates successful update + return format_success_response(contact_id, "updated", "Contact", contact_data) + else: + return { + "success": False, + "message": f"Failed to update Contact. Status code: {result}" + } + + except Exception as e: + return handle_salesforce_error(e, "update", "Contact") + +async def delete_contact(contact_id: str) -> Dict[str, Any]: + """Delete a contact.""" + logger.info(f"Executing tool: delete_contact with contact_id: {contact_id}") + try: + sf = get_salesforce_conn() + + result = sf.Contact.delete(contact_id) + + # simple-salesforce returns HTTP status code for deletes + if result == 204: # HTTP 204 No Content indicates successful deletion + return format_success_response(contact_id, "deleted", "Contact") + else: + return { + "success": False, + "message": f"Failed to delete Contact. Status code: {result}" + } + + except Exception as e: + return handle_salesforce_error(e, "delete", "Contact") \ No newline at end of file diff --git a/mcp_servers/salesforce/tools/leads.py b/mcp_servers/salesforce/tools/leads.py new file mode 100644 index 00000000..7424f669 --- /dev/null +++ b/mcp_servers/salesforce/tools/leads.py @@ -0,0 +1,204 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import get_salesforce_conn, handle_salesforce_error, format_success_response + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_leads(status: Optional[str] = None, limit: int = 50, fields: Optional[List[str]] = None, name_contains: Optional[str] = None, company_contains: Optional[str] = None, email_contains: Optional[str] = None, industry: Optional[str] = None) -> Dict[str, Any]: + """Get leads with flexible filtering options.""" + logger.info(f"Executing tool: get_leads with status: {status}, limit: {limit}, name_contains: {name_contains}, company_contains: {company_contains}, email_contains: {email_contains}, industry: {industry}") + try: + sf = get_salesforce_conn() + + # Default fields if none specified + if not fields: + fields = ['Id', 'FirstName', 'LastName', 'Email', 'Phone', 'Company', 'Title', + 'Status', 'LeadSource', 'Industry', 'Rating', 'OwnerId', + 'CreatedDate', 'LastModifiedDate'] + + field_list = ', '.join(fields) + + # Build query with optional filters + where_clauses = [] + if status: + where_clauses.append(f"Status = '{status}'") + if name_contains: + # Case-insensitive search for first or last name + name_variations = [ + name_contains.lower(), + name_contains.upper(), + name_contains.capitalize(), + name_contains + ] + name_like_conditions = [] + for variation in set(name_variations): + name_like_conditions.extend([ + f"FirstName LIKE '%{variation}%'", + f"LastName LIKE '%{variation}%'" + ]) + where_clauses.append(f"({' OR '.join(name_like_conditions)})") + if company_contains: + # Case-insensitive company search + company_variations = [ + company_contains.lower(), + company_contains.upper(), + company_contains.capitalize(), + company_contains + ] + company_like_conditions = " OR ".join([f"Company LIKE '%{variation}%'" for variation in set(company_variations)]) + where_clauses.append(f"({company_like_conditions})") + if email_contains: + # Case-insensitive email search + email_variations = [ + email_contains.lower(), + email_contains.upper(), + email_contains + ] + email_like_conditions = " OR ".join([f"Email LIKE '%{variation}%'" for variation in set(email_variations)]) + where_clauses.append(f"({email_like_conditions})") + if industry: + where_clauses.append(f"Industry = '{industry}'") + + where_clause = " WHERE " + " AND ".join(where_clauses) if where_clauses else "" + query = f"SELECT {field_list} FROM Lead{where_clause} ORDER BY CreatedDate DESC LIMIT {limit}" + + result = sf.query(query) + return dict(result) + + except Exception as e: + logger.exception(f"Error executing tool get_leads: {e}") + raise e + +async def get_lead_by_id(lead_id: str, fields: Optional[List[str]] = None) -> Dict[str, Any]: + """Get a specific lead by ID.""" + logger.info(f"Executing tool: get_lead_by_id with lead_id: {lead_id}") + try: + sf = get_salesforce_conn() + + # Default fields if none specified + if not fields: + fields = ['Id', 'FirstName', 'LastName', 'Email', 'Phone', 'MobilePhone', + 'Company', 'Title', 'Status', 'LeadSource', 'Industry', 'Rating', + 'Street', 'City', 'State', 'Country', 'PostalCode', 'Website', + 'Description', 'NumberOfEmployees', 'AnnualRevenue', 'OwnerId', + 'CreatedDate', 'LastModifiedDate'] + + field_list = ', '.join(fields) + query = f"SELECT {field_list} FROM Lead WHERE Id = '{lead_id}'" + + result = sf.query(query) + return dict(result) + + except Exception as e: + logger.exception(f"Error executing tool get_lead_by_id: {e}") + raise e + +async def create_lead(lead_data: Dict[str, Any]) -> Dict[str, Any]: + """Create a new lead.""" + logger.info(f"Executing tool: create_lead") + try: + sf = get_salesforce_conn() + + # Validate required fields + required_fields = ['LastName', 'Company'] + missing_fields = [field for field in required_fields if field not in lead_data] + + if missing_fields: + return { + "success": False, + "error": f"Required fields missing: {', '.join(missing_fields)}", + "message": "Failed to create Lead" + } + + result = sf.Lead.create(lead_data) + + if result.get('success'): + return format_success_response(result.get('id'), "created", "Lead", lead_data) + else: + return { + "success": False, + "errors": result.get('errors', []), + "message": "Failed to create Lead" + } + + except Exception as e: + return handle_salesforce_error(e, "create", "Lead") + +async def update_lead(lead_id: str, lead_data: Dict[str, Any]) -> Dict[str, Any]: + """Update an existing lead.""" + logger.info(f"Executing tool: update_lead with lead_id: {lead_id}") + try: + sf = get_salesforce_conn() + + result = sf.Lead.update(lead_id, lead_data) + + # simple-salesforce returns HTTP status code for updates + if result == 204: # HTTP 204 No Content indicates successful update + return format_success_response(lead_id, "updated", "Lead", lead_data) + else: + return { + "success": False, + "message": f"Failed to update Lead. Status code: {result}" + } + + except Exception as e: + return handle_salesforce_error(e, "update", "Lead") + +async def delete_lead(lead_id: str) -> Dict[str, Any]: + """Delete a lead.""" + logger.info(f"Executing tool: delete_lead with lead_id: {lead_id}") + try: + sf = get_salesforce_conn() + + result = sf.Lead.delete(lead_id) + + # simple-salesforce returns HTTP status code for deletes + if result == 204: # HTTP 204 No Content indicates successful deletion + return format_success_response(lead_id, "deleted", "Lead") + else: + return { + "success": False, + "message": f"Failed to delete Lead. Status code: {result}" + } + + except Exception as e: + return handle_salesforce_error(e, "delete", "Lead") + +async def convert_lead(lead_id: str, conversion_data: Dict[str, Any] = None) -> Dict[str, Any]: + """Convert a lead to account, contact, and optionally opportunity.""" + logger.info(f"Executing tool: convert_lead with lead_id: {lead_id}") + try: + sf = get_salesforce_conn() + + # Default conversion data if none provided + if not conversion_data: + conversion_data = { + 'doNotCreateOpportunity': True, # Set to False to create opportunity + 'sendNotificationEmail': False + } + + # Lead conversion is done via REST API + conversion_url = f"sobjects/Lead/{lead_id}/convert" + result = sf.restful(conversion_url, method='POST', json=conversion_data) + + if result.get('success'): + return { + "success": True, + "message": "Lead converted successfully", + "conversion_details": { + "lead_id": lead_id, + "account_id": result.get('accountId'), + "contact_id": result.get('contactId'), + "opportunity_id": result.get('opportunityId') + } + } + else: + return { + "success": False, + "errors": result.get('errors', []), + "message": "Failed to convert Lead" + } + + except Exception as e: + return handle_salesforce_error(e, "convert", "Lead") \ No newline at end of file diff --git a/mcp_servers/salesforce/tools/metadata.py b/mcp_servers/salesforce/tools/metadata.py new file mode 100644 index 00000000..99d60f53 --- /dev/null +++ b/mcp_servers/salesforce/tools/metadata.py @@ -0,0 +1,96 @@ +import logging +from typing import Any, Dict, List +from .base import get_salesforce_conn + +# Configure logging +logger = logging.getLogger(__name__) + +async def execute_soql_query(query: str) -> Dict[str, Any]: + """Execute a SOQL query on Salesforce.""" + logger.info(f"Executing tool: execute_soql_query with query: {query}") + try: + sf = get_salesforce_conn() + result = sf.query(query) + return dict(result) + except Exception as e: + logger.exception(f"Error executing SOQL query: {e}") + raise e + +async def execute_tooling_query(query: str) -> Dict[str, Any]: + """Execute a query against the Salesforce Tooling API.""" + logger.info(f"Executing tool: execute_tooling_query with query: {query}") + try: + sf = get_salesforce_conn() + result = sf.toolingexecute(f"query/?q={query}") + return dict(result) + except Exception as e: + logger.exception(f"Error executing tooling query: {e}") + raise e + +async def describe_object(object_name: str, detailed: bool = False) -> Dict[str, Any]: + """Get detailed metadata about a Salesforce object.""" + logger.info(f"Executing tool: describe_object with object_name: {object_name}") + try: + sf = get_salesforce_conn() + sobject = getattr(sf, object_name) + result = sobject.describe() + + if detailed and object_name.endswith('__c'): + # For custom objects, get additional metadata if requested + metadata_result = sf.restful(f"sobjects/{object_name}/describe/") + return { + "describe": dict(result), + "metadata": metadata_result + } + + return dict(result) + except Exception as e: + logger.exception(f"Error describing object: {e}") + raise e + +async def get_component_source(metadata_type: str, component_names: List[str]) -> Dict[str, Any]: + """Retrieve metadata components from Salesforce.""" + logger.info(f"Executing tool: get_component_source with type: {metadata_type}") + try: + sf = get_salesforce_conn() + + # Valid metadata types + valid_types = [ + 'CustomObject', 'Flow', 'FlowDefinition', 'CustomField', + 'ValidationRule', 'ApexClass', 'ApexTrigger', 'WorkflowRule', 'Layout' + ] + + if metadata_type not in valid_types: + raise ValueError(f"Invalid metadata type: {metadata_type}") + + # Use Tooling API for metadata queries + results = [] + for name in component_names: + try: + if metadata_type == 'ApexClass': + query = f"SELECT Id, Name, Body FROM ApexClass WHERE Name = '{name}'" + elif metadata_type == 'ApexTrigger': + query = f"SELECT Id, Name, Body FROM ApexTrigger WHERE Name = '{name}'" + elif metadata_type == 'Flow': + query = f"SELECT Id, MasterLabel, Definition FROM Flow WHERE MasterLabel = '{name}'" + else: + # For other types, use general metadata query + query = f"SELECT Id, DeveloperName FROM {metadata_type} WHERE DeveloperName = '{name}'" + + result = sf.toolingexecute(f"query/?q={query}") + results.append({ + "name": name, + "type": metadata_type, + "data": dict(result) + }) + except Exception as e: + results.append({ + "name": name, + "type": metadata_type, + "error": str(e) + }) + + return {"results": results} + except Exception as e: + logger.exception(f"Error retrieving metadata: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/salesforce/tools/opportunities.py b/mcp_servers/salesforce/tools/opportunities.py new file mode 100644 index 00000000..ee8f85f3 --- /dev/null +++ b/mcp_servers/salesforce/tools/opportunities.py @@ -0,0 +1,195 @@ +import logging +from typing import Any, Dict, List, Optional +from .base import get_salesforce_conn, handle_salesforce_error, format_success_response + +# Configure logging +logger = logging.getLogger(__name__) + +async def get_opportunities( + account_id: Optional[str] = None, + stage: Optional[str] = None, + name_contains: Optional[str] = None, + account_name_contains: Optional[str] = None, + limit: int = 50, + fields: Optional[List[str]] = None +) -> Dict[str, Any]: + """Get opportunities, optionally filtered by account, stage, name, or account name.""" + logger.info(f"Executing tool: get_opportunities with account_id: {account_id}, stage: {stage}, name_contains: {name_contains}, account_name_contains: {account_name_contains}, limit: {limit}") + try: + sf = get_salesforce_conn() + + # Default fields if none specified + if not fields: + fields = ['Id', 'Name', 'StageName', 'Amount', 'CloseDate', 'Probability', + 'AccountId', 'Account.Name', 'Type', 'LeadSource', 'OwnerId', + 'CreatedDate', 'LastModifiedDate'] + + field_list = ', '.join(fields) + + # Build query with optional filters + where_clauses = [] + if account_id: + where_clauses.append(f"AccountId = '{account_id}'") + if stage: + where_clauses.append(f"StageName = '{stage}'") + if name_contains: + # Case-insensitive search by trying multiple case variations + name_variations = [ + name_contains.lower(), + name_contains.upper(), + name_contains.capitalize(), + name_contains + ] + # Create OR conditions for different case variations + name_like_conditions = " OR ".join([f"Name LIKE '%{variation}%'" for variation in set(name_variations)]) + where_clauses.append(f"({name_like_conditions})") + + if account_name_contains: + # Case-insensitive search by trying multiple case variations + account_variations = [ + account_name_contains.lower(), + account_name_contains.upper(), + account_name_contains.capitalize(), + account_name_contains + ] + # Create OR conditions for different case variations + account_like_conditions = " OR ".join([f"Account.Name LIKE '%{variation}%'" for variation in set(account_variations)]) + where_clauses.append(f"({account_like_conditions})") + + where_clause = " WHERE " + " AND ".join(where_clauses) if where_clauses else "" + query = f"SELECT {field_list} FROM Opportunity{where_clause} ORDER BY CloseDate ASC LIMIT {limit}" + + result = sf.query(query) + return dict(result) + + except Exception as e: + logger.exception(f"Error executing tool get_opportunities: {e}") + raise e + +async def get_opportunity_by_id(opportunity_id: str, fields: Optional[List[str]] = None) -> Dict[str, Any]: + """Get a specific opportunity by ID.""" + logger.info(f"Executing tool: get_opportunity_by_id with opportunity_id: {opportunity_id}") + try: + sf = get_salesforce_conn() + + # Default fields if none specified + if not fields: + fields = ['Id', 'Name', 'StageName', 'Amount', 'CloseDate', 'Probability', + 'AccountId', 'Account.Name', 'Type', 'LeadSource', 'Description', + 'NextStep', 'CompetitorName__c', 'DeliveryInstallationStatus__c', + 'TrackingNumber__c', 'OrderNumber__c', 'CurrentGenerators__c', + 'MainCompetitors__c', 'OwnerId', 'CreatedDate', 'LastModifiedDate'] + + field_list = ', '.join(fields) + query = f"SELECT {field_list} FROM Opportunity WHERE Id = '{opportunity_id}'" + + result = sf.query(query) + return dict(result) + + except Exception as e: + logger.exception(f"Error executing tool get_opportunity_by_id: {e}") + raise e + +async def create_opportunity(opportunity_data: Dict[str, Any]) -> Dict[str, Any]: + """Create a new opportunity.""" + logger.info(f"Executing tool: create_opportunity") + try: + sf = get_salesforce_conn() + + # Validate required fields + required_fields = ['Name', 'StageName', 'CloseDate'] + missing_fields = [field for field in required_fields if field not in opportunity_data] + + if missing_fields: + return { + "success": False, + "error": f"Required fields missing: {', '.join(missing_fields)}", + "message": "Failed to create Opportunity" + } + + result = sf.Opportunity.create(opportunity_data) + + if result.get('success'): + return format_success_response(result.get('id'), "created", "Opportunity", opportunity_data) + else: + return { + "success": False, + "errors": result.get('errors', []), + "message": "Failed to create Opportunity" + } + + except Exception as e: + return handle_salesforce_error(e, "create", "Opportunity") + +async def update_opportunity( + opportunity_id: str, + closed_date: Optional[str] = None, + stage: Optional[str] = None, + amount: Optional[float] = None, + next_step: Optional[str] = None, + description: Optional[str] = None, + owner_id: Optional[str] = None, + account_id: Optional[str] = None +) -> Dict[str, Any]: + """Update an existing opportunity.""" + logger.info(f"Executing tool: update_opportunity with opportunity_id: {opportunity_id}") + try: + sf = get_salesforce_conn() + + # Build update data from provided parameters + opportunity_data = {} + if closed_date is not None: + opportunity_data['CloseDate'] = closed_date + if stage is not None: + opportunity_data['StageName'] = stage + if amount is not None: + opportunity_data['Amount'] = amount + if next_step is not None: + opportunity_data['NextStep'] = next_step + if description is not None: + opportunity_data['Description'] = description + if owner_id is not None: + opportunity_data['OwnerId'] = owner_id + if account_id is not None: + opportunity_data['AccountId'] = account_id + + # Only update if there's data to update + if not opportunity_data: + return { + "success": False, + "message": "No fields provided to update" + } + + result = sf.Opportunity.update(opportunity_id, opportunity_data) + + # simple-salesforce returns HTTP status code for updates + if result == 204: # HTTP 204 No Content indicates successful update + return format_success_response(opportunity_id, "updated", "Opportunity", opportunity_data) + else: + return { + "success": False, + "message": f"Failed to update Opportunity. Status code: {result}" + } + + except Exception as e: + return handle_salesforce_error(e, "update", "Opportunity") + +async def delete_opportunity(opportunity_id: str) -> Dict[str, Any]: + """Delete an opportunity.""" + logger.info(f"Executing tool: delete_opportunity with opportunity_id: {opportunity_id}") + try: + sf = get_salesforce_conn() + + result = sf.Opportunity.delete(opportunity_id) + + # simple-salesforce returns HTTP status code for deletes + if result == 204: # HTTP 204 No Content indicates successful deletion + return format_success_response(opportunity_id, "deleted", "Opportunity") + else: + return { + "success": False, + "message": f"Failed to delete Opportunity. Status code: {result}" + } + + except Exception as e: + return handle_salesforce_error(e, "delete", "Opportunity") \ No newline at end of file diff --git a/mcp_servers/shopify/.env.example b/mcp_servers/shopify/.env.example new file mode 100644 index 00000000..67a92093 --- /dev/null +++ b/mcp_servers/shopify/.env.example @@ -0,0 +1,8 @@ +# Port number to listen on +PORT=5000 + +# Shopify Authentication Token +SHOPIFY_SHOP_DOMAIN=your-shopify-store.shopify.com + +# Shopify api token (Make sure to set permissions properly, according to the use case) +SHOPIFY_ACCESS_TOKEN=your-shopify-api-token \ No newline at end of file diff --git a/mcp_servers/shopify/.eslintrc.json b/mcp_servers/shopify/.eslintrc.json new file mode 100644 index 00000000..6a1b1376 --- /dev/null +++ b/mcp_servers/shopify/.eslintrc.json @@ -0,0 +1,11 @@ +{ + "root": false, + "extends": [ + "../.eslintrc.js" + ], + "parserOptions": { + "tsconfigRootDir": ".", + "project": "./tsconfig.json" + }, + "rules": {} +} \ No newline at end of file diff --git a/mcp_servers/shopify/Dockerfile b/mcp_servers/shopify/Dockerfile new file mode 100644 index 00000000..779930e1 --- /dev/null +++ b/mcp_servers/shopify/Dockerfile @@ -0,0 +1,35 @@ +# Use a Node.js image as the base for building the application +FROM node:22-alpine AS builder + +# Set the working directory inside the container +WORKDIR /app + +# Copy package.json and package-lock.json to install dependencies +COPY mcp_servers/shopify/package.json mcp_servers/shopify/package-lock.json ./ + +# Install dependencies (ignoring scripts to prevent running the prepare script) +RUN npm install --ignore-scripts + +# Copy the rest of the application source code +COPY mcp_servers/shopify . + +# Build the application using TypeScript +RUN npm run build + +# Use a smaller Node.js image for the final image +FROM node:22-slim AS release + +# Set the working directory inside the container +WORKDIR /app + +# Copy the built application from the builder stage +COPY --from=builder /app/dist /app/dist +COPY --from=builder /app/package.json /app/package.json +COPY --from=builder /app/package-lock.json /app/package-lock.json +# Install only production dependencies +RUN npm ci --omit=dev --ignore-scripts + +EXPOSE 5000 + +# Specify the command to run the application +ENTRYPOINT ["node", "dist/index.js"] diff --git a/mcp_servers/shopify/README.md b/mcp_servers/shopify/README.md new file mode 100644 index 00000000..227a464c --- /dev/null +++ b/mcp_servers/shopify/README.md @@ -0,0 +1,77 @@ +# Shopify MCP Server + +A Model Context Protocol (MCP) server for Shopify integration. Manage products, orders, customers, and store operations using Shopify's API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Shopify with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("SHOPIFY", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/shopify-mcp-server:latest + + +# Run Shopify MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/shopify-mcp-server:latest + +# Run Shopify MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_shopify_access_token_here"}' \ + ghcr.io/klavis-ai/shopify-mcp-server:latest +``` + +**OAuth Setup:** Shopify requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Product Management**: Create, read, update products and variants +- **Order Processing**: Manage orders, fulfillments, and refunds +- **Customer Management**: Handle customer data and profiles +- **Inventory Control**: Track and manage inventory levels +- **Store Analytics**: Access sales data and analytics + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/shopify/index.ts b/mcp_servers/shopify/index.ts new file mode 100644 index 00000000..dd8bf834 --- /dev/null +++ b/mcp_servers/shopify/index.ts @@ -0,0 +1,566 @@ +#!/usr/bin/env node +import express, { Request, Response as ExpressResponse } from 'express'; +import { Server } from "@modelcontextprotocol/sdk/server/index.js"; +import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js"; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; +import { + CallToolRequest, + CallToolRequestSchema, + ListToolsRequestSchema, + Tool, +} from "@modelcontextprotocol/sdk/types.js"; +import { z } from 'zod'; +import { AsyncLocalStorage } from "async_hooks"; +import { setTimeout } from 'timers/promises'; +import { ApiErrorResponse, ApiHeaders, AsyncLocalStorageState, CreateOrderArgs, CreateProductArgs, GetCustomerArgs, GetOrderArgs, GetProductArgs, ListCustomersArgs, ListOrdersArgs, ListProductsArgs, OrderStatus, ShopifyCredentials, UpdateProductArgs } from './types.js'; +import { createOrderTool, createProductTool, getCustomerTool, getOrderTool, getProductTool, listCustomersTool, listOrdersTool, listProductsTool, updateProductTool } from './tools.js'; +import dotenv from 'dotenv'; + +dotenv.config(); +class ShopifyClient { + private apiHeaders: ApiHeaders; + private shopDomain: string; + private lastRequestTime: number = 0; + private readonly minRequestInterval: number = 500; + private readonly apiVersion = '2025-04'; + + constructor(accessToken: string, shopDomain: string) { + this.apiHeaders = { + 'X-Shopify-Access-Token': accessToken, + 'Content-Type': 'application/json', + }; + this.shopDomain = shopDomain; + } + + private async respectRateLimit(): Promise { + const now = Date.now(); + const timeSinceLastRequest = now - this.lastRequestTime; + + if (timeSinceLastRequest < this.minRequestInterval) { + const waitTime = this.minRequestInterval - timeSinceLastRequest; + await setTimeout(waitTime); + } + + this.lastRequestTime = Date.now(); + } + + private async handleApiResponse(response: globalThis.Response): Promise { + if (response.ok) { + return await response.json() as T; + } + + const errorText = await response.text(); + let errorMessage = `Shopify API error: ${response.status}`; + + try { + const errorJson = JSON.parse(errorText) as ApiErrorResponse; + errorMessage = `Shopify API error: ${response.status} - ${JSON.stringify(errorJson)}`; + + // Handle rate limiting + if (response.status === 429) { + const retryAfter = parseInt(response.headers.get('Retry-After') || '10', 10); + console.warn(`Rate limited. Retrying after ${retryAfter} seconds`); + await setTimeout(retryAfter * 1000); + // In a production implementation, you would retry the request here + // For now, just throwing the error to be consistent with existing code + throw new Error(`Rate limit exceeded. Retry after ${retryAfter} seconds.`); + } + + // Handle GraphQL-specific errors + if (errorJson.errors) { + const graphQLErrors = errorJson.errors; + errorMessage = `GraphQL errors: ${JSON.stringify(graphQLErrors)}`; + + // Check for ShopifyQL specific errors + if (typeof graphQLErrors === 'object' && Array.isArray(graphQLErrors)) { + const shopifyQLErrors = graphQLErrors.filter((err: any) => + err.message && err.message.includes('ShopifyQL')); + + if (shopifyQLErrors.length > 0) { + errorMessage = `ShopifyQL error: ${shopifyQLErrors.map((e: any) => e.message).join('; ')}`; + } + } + } + } catch (e) { + errorMessage = `Shopify API error: ${response.status} - ${errorText}`; + } + + throw new Error(errorMessage); + } + + refreshToken(): boolean { + const credentials = getShopifyCredentials(); + if (credentials.accessToken && credentials.shopDomain) { + this.apiHeaders['X-Shopify-Access-Token'] = credentials.accessToken; + this.shopDomain = credentials.shopDomain; + return true; + } + return false; + } + + async listProducts(limit: number = 50, cursor?: string, collection_id?: string): Promise> { + this.refreshToken(); + await this.respectRateLimit(); + + const params = new URLSearchParams({ + limit: Math.min(limit, 250).toString(), + }); + + if (cursor) { + params.append("page_info", cursor); + params.append("limit", Math.min(limit, 250).toString()); + } + + if (collection_id) { + params.append("collection_id", collection_id); + } + + const response = await fetch( + `https://${this.shopDomain}/admin/api/${this.apiVersion}/products.json?${params}`, + { headers: this.apiHeaders } + ); + + return this.handleApiResponse>(response); + } + + async getProduct(product_id: string): Promise> { + this.refreshToken(); + await this.respectRateLimit(); + + const response = await fetch( + `https://${this.shopDomain}/admin/api/${this.apiVersion}/products/${product_id}.json`, + { headers: this.apiHeaders } + ); + + return this.handleApiResponse>(response); + } + + async createProduct(productData: CreateProductArgs): Promise> { + this.refreshToken(); + await this.respectRateLimit(); + + const response = await fetch( + `https://${this.shopDomain}/admin/api/${this.apiVersion}/products.json`, + { + method: "POST", + headers: this.apiHeaders, + body: JSON.stringify({ product: productData }), + } + ); + + return this.handleApiResponse>(response); + } + + async updateProduct(product_id: string, productData: Partial): Promise> { + this.refreshToken(); + await this.respectRateLimit(); + + const response = await fetch( + `https://${this.shopDomain}/admin/api/${this.apiVersion}/products/${product_id}.json`, + { + method: "PUT", + headers: this.apiHeaders, + body: JSON.stringify({ product: productData }), + } + ); + + return this.handleApiResponse>(response); + } + + async listOrders(limit: number = 50, status: OrderStatus = "any", cursor?: string): Promise> { + this.refreshToken(); + await this.respectRateLimit(); + + const params = new URLSearchParams({ + limit: Math.min(limit, 250).toString(), + status: status, + }); + + if (cursor) { + params.append("page_info", cursor); + params.append("limit", Math.min(limit, 250).toString()); + } + + const response = await fetch( + `https://${this.shopDomain}/admin/api/${this.apiVersion}/orders.json?${params}`, + { headers: this.apiHeaders } + ); + + return this.handleApiResponse>(response); + } + + async getOrder(order_id: string): Promise> { + this.refreshToken(); + await this.respectRateLimit(); + + const response = await fetch( + `https://${this.shopDomain}/admin/api/${this.apiVersion}/orders/${order_id}.json`, + { headers: this.apiHeaders } + ); + + return this.handleApiResponse>(response); + } + + async createOrder(orderData: CreateOrderArgs): Promise> { + this.refreshToken(); + await this.respectRateLimit(); + + const response = await fetch( + `https://${this.shopDomain}/admin/api/${this.apiVersion}/orders.json`, + { + method: "POST", + headers: this.apiHeaders, + body: JSON.stringify({ order: orderData }), + } + ); + + return this.handleApiResponse>(response); + } + + async listCustomers(limit: number = 50, cursor?: string): Promise> { + this.refreshToken(); + await this.respectRateLimit(); + + const params = new URLSearchParams({ + limit: Math.min(limit, 250).toString(), + }); + + if (cursor) { + params.append("page_info", cursor); + params.append("limit", Math.min(limit, 250).toString()); + } + + const response = await fetch( + `https://${this.shopDomain}/admin/api/${this.apiVersion}/customers.json?${params}`, + { headers: this.apiHeaders } + ); + + return this.handleApiResponse>(response); + } + + async getCustomer(customer_id: string): Promise> { + this.refreshToken(); + await this.respectRateLimit(); + + const response = await fetch( + `https://${this.shopDomain}/admin/api/${this.apiVersion}/customers/${customer_id}.json`, + { headers: this.apiHeaders } + ); + + return this.handleApiResponse>(response); + } +} + +const getShopifyMcpServer = (): Server => { + const server = new Server( + { + name: "shopify-mcp-server", + version: "0.1.0", + }, + { + capabilities: { + tools: {}, + }, + } + ); + server.setRequestHandler( + ListToolsRequestSchema, + async () => { + return { + tools: [ + listProductsTool, + getProductTool, + createProductTool, + updateProductTool, + listOrdersTool, + getOrderTool, + createOrderTool, + listCustomersTool, + getCustomerTool + ], + }; + } + ); + + server.setRequestHandler( + CallToolRequestSchema, + async (request: CallToolRequest) => { + try { + if (!request.params?.name) { + throw new Error("Missing tool name"); + } + + const credentials = getShopifyCredentials(); + if (!credentials.accessToken || !credentials.shopDomain) { + throw new Error("No valid Shopify credentials found for this instance"); + } + + const shopifyClient = new ShopifyClient(credentials.accessToken, credentials.shopDomain); + + switch (request.params.name) { + case "shopify_list_products": { + const args = request.params.arguments as unknown as ListProductsArgs; + const response = await shopifyClient.listProducts( + args.limit, + args.cursor, + args.collection_id + ); + return { + content: [{ type: "text", text: JSON.stringify(response) }], + } as const; + } + + case "shopify_get_product": { + const args = request.params.arguments as unknown as GetProductArgs; + if (!args.product_id) { + throw new Error("Missing required argument: product_id"); + } + const response = await shopifyClient.getProduct(args.product_id); + return { + content: [{ type: "text", text: JSON.stringify(response) }], + } as const; + } + + case "shopify_create_product": { + const args = request.params.arguments as unknown as CreateProductArgs; + if (!args.title) { + throw new Error("Missing required argument: title"); + } + const response = await shopifyClient.createProduct(args); + return { + content: [{ type: "text", text: JSON.stringify(response) }], + } as const; + } + + case "shopify_update_product": { + const args = request.params.arguments as unknown as UpdateProductArgs; + if (!args.product_id) { + throw new Error("Missing required argument: product_id"); + } + const { product_id, ...productData } = args; + const response = await shopifyClient.updateProduct(product_id, productData); + return { + content: [{ type: "text", text: JSON.stringify(response) }], + } as const; + } + + case "shopify_list_orders": { + const args = request.params.arguments as unknown as ListOrdersArgs; + const response = await shopifyClient.listOrders( + args.limit, + args.status as OrderStatus, + args.cursor + ); + return { + content: [{ type: "text", text: JSON.stringify(response) }], + } as const; + } + + case "shopify_get_order": { + const args = request.params.arguments as unknown as GetOrderArgs; + if (!args.order_id) { + throw new Error("Missing required argument: order_id"); + } + const response = await shopifyClient.getOrder(args.order_id); + return { + content: [{ type: "text", text: JSON.stringify(response) }], + } as const; + } + + case "shopify_create_order": { + const args = request.params.arguments as unknown as CreateOrderArgs; + if (!args.line_items || args.line_items.length === 0) { + throw new Error("Missing required argument: line_items"); + } + const response = await shopifyClient.createOrder(args); + return { + content: [{ type: "text", text: JSON.stringify(response) }], + } as const; + } + + case "shopify_list_customers": { + const args = request.params.arguments as unknown as ListCustomersArgs; + const response = await shopifyClient.listCustomers( + args.limit, + args.cursor + ); + return { + content: [{ type: "text", text: JSON.stringify(response) }], + } as const; + } + + case "shopify_get_customer": { + const args = request.params.arguments as unknown as GetCustomerArgs; + if (!args.customer_id) { + throw new Error("Missing required argument: customer_id"); + } + const response = await shopifyClient.getCustomer(args.customer_id); + return { + content: [{ type: "text", text: JSON.stringify(response) }], + } as const; + } + + default: + throw new Error(`Unknown tool: ${request.params.name}`); + } + } catch (error) { + console.error("Error executing tool:", error); + + if (error instanceof z.ZodError) { + throw new Error(`Invalid input: ${JSON.stringify(error.errors)}`); + } + + throw error; + } + } + ); + + return server; +}; + +const asyncLocalStorage = new AsyncLocalStorage(); + +function extractAuthData(req: Request): { access_token?: string; shop_domain?: string } { + let authData = process.env.AUTH_DATA; + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: Shopify access token is missing. Provide it via AUTH_DATA env var or x-auth-data header with access_token field.'); + return JSON.parse('{}'); + } + + const authDataJson = JSON.parse(authData) as { access_token?: string; shop_domain?: string }; + return authDataJson; +} + +function getShopifyCredentials(): ShopifyCredentials { + if (process.env.SHOPIFY_ACCESS_TOKEN && process.env.SHOPIFY_SHOP_DOMAIN) { + return { + accessToken: process.env.SHOPIFY_ACCESS_TOKEN, + shopDomain: process.env.SHOPIFY_SHOP_DOMAIN, + }; + } + const store = asyncLocalStorage.getStore(); + return { + accessToken: store?.shopify_access_token, + shopDomain: store?.shopify_shop_domain, + }; +} + +const app = express(); +app.use(express.json()); + +app.post('/mcp', async (req: Request, res: ExpressResponse) => { + const authData = extractAuthData(req); + const accessToken = authData.access_token ?? ''; + const shopDomain = authData.shop_domain ?? ''; + + const server = getShopifyMcpServer(); + try { + const transport: StreamableHTTPServerTransport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + await server.connect(transport); + asyncLocalStorage.run({ + shopify_access_token: accessToken, + shopify_shop_domain: shopDomain + }, async () => { + await transport.handleRequest(req, res, req.body); + }); + res.on('close', () => { + console.log('Request closed'); + transport.close(); + server.close(); + }); + } catch (error) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', + }, + id: null, + }); + } + } +}); + +app.get('/mcp', async (req: Request, res: ExpressResponse) => { + console.log('Received GET MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +app.delete('/mcp', async (req: Request, res: ExpressResponse) => { + console.log('Received DELETE MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +const transports = new Map(); + +app.get("/sse", async (req, res) => { + const transport = new SSEServerTransport(`/messages`, res); + + res.on('close', async () => { + console.log(`SSE connection closed for transport: ${transport.sessionId}`); + try { + transports.delete(transport.sessionId); + } finally { + } + }); + + transports.set(transport.sessionId, transport); + + const server = getShopifyMcpServer(); + await server.connect(transport); + + console.log(`SSE connection established with transport: ${transport.sessionId}`); +}); + +app.post("/messages", async (req, res) => { + const sessionId = req.query.sessionId as string; + + let transport: SSEServerTransport | undefined; + transport = sessionId ? transports.get(sessionId) : undefined; + if (transport) { + const authData = extractAuthData(req); + const accessToken = authData.access_token ?? ''; + const shopDomain = authData.shop_domain ?? ''; + + asyncLocalStorage.run({ + shopify_access_token: accessToken, + shopify_shop_domain: shopDomain + }, async () => { + await transport.handlePostMessage(req, res); + }); + } else { + console.error(`Transport not found for session ID: ${sessionId}`); + res.status(404).send({ error: "Transport not found" }); + } +}); + +app.listen(process.env.PORT || 5000, () => { + console.log(`server running on port ${process.env.PORT || 5000}`); +}); \ No newline at end of file diff --git a/mcp_servers/slack/package-lock.json b/mcp_servers/shopify/package-lock.json similarity index 83% rename from mcp_servers/slack/package-lock.json rename to mcp_servers/shopify/package-lock.json index 2b86b8d1..e75851ff 100644 --- a/mcp_servers/slack/package-lock.json +++ b/mcp_servers/shopify/package-lock.json @@ -1,23 +1,24 @@ { - "name": "@modelcontextprotocol/server-slack", - "version": "0.6.2", + "name": "@klavis-ai/shopify-mcp-server", + "version": "0.1.0", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "@modelcontextprotocol/server-slack", - "version": "0.6.2", + "name": "@klavis-ai/shopify-mcp-server", + "version": "0.1.0", "license": "MIT", "dependencies": { - "@modelcontextprotocol/sdk": "^1.8.0", + "@modelcontextprotocol/sdk": "^1.12.1", "@supabase/supabase-js": "^2.49.1", "@types/node": "^22", + "dotenv": "^16.3.1", "express": "^4.21.2", "zod": "^3.22.4", "zod-to-json-schema": "^3.23.5" }, "bin": { - "mcp-server-slack": "dist/index.js" + "mcp-server-shopify": "dist/index.js" }, "devDependencies": { "@types/express": "^5.0.0", @@ -28,16 +29,20 @@ } }, "node_modules/@modelcontextprotocol/sdk": { - "version": "1.8.0", + "version": "1.17.3", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.3.tgz", + "integrity": "sha512-JPwUKWSsbzx+DLFznf/QZ32Qa+ptfbUlHhRLrBQBAFu9iI1iYvizM4p+zhhRDceSsPutXp4z+R/HPVphlIiclg==", "license": "MIT", "dependencies": { + "ajv": "^6.12.6", "content-type": "^1.0.5", "cors": "^2.8.5", - "cross-spawn": "^7.0.3", + "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", + "eventsource-parser": "^3.0.0", "express": "^5.0.1", "express-rate-limit": "^7.5.0", - "pkce-challenge": "^4.1.0", + "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" @@ -46,8 +51,81 @@ "node": ">=18" } }, + "node_modules/@modelcontextprotocol/sdk/node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, "node_modules/@modelcontextprotocol/sdk/node_modules/eventsource": { - "version": "3.0.6", + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", + "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", "license": "MIT", "dependencies": { "eventsource-parser": "^3.0.1" @@ -57,57 +135,206 @@ } }, "node_modules/@modelcontextprotocol/sdk/node_modules/express": { - "version": "5.0.1", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", "license": "MIT", "dependencies": { "accepts": "^2.0.0", - "body-parser": "^2.0.1", + "body-parser": "^2.2.0", "content-disposition": "^1.0.0", - "content-type": "~1.0.4", - "cookie": "0.7.1", + "content-type": "^1.0.5", + "cookie": "^0.7.1", "cookie-signature": "^1.2.1", - "debug": "4.3.6", - "depd": "2.0.0", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "finalhandler": "^2.0.0", - "fresh": "2.0.0", - "http-errors": "2.0.0", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", - "methods": "~1.1.2", "mime-types": "^3.0.0", - "on-finished": "2.4.1", - "once": "1.4.0", - "parseurl": "~1.3.3", - "proxy-addr": "~2.0.7", - "qs": "6.13.0", - "range-parser": "~1.2.1", - "router": "^2.0.0", - "safe-buffer": "5.2.1", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", "send": "^1.1.0", - "serve-static": "^2.1.0", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "type-is": "^2.0.0", - "utils-merge": "1.0.1", - "vary": "~1.1.2" + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" }, "engines": { "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, - "node_modules/@modelcontextprotocol/sdk/node_modules/express-rate-limit": { - "version": "7.5.0", + "node_modules/@modelcontextprotocol/sdk/node_modules/finalhandler": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", "license": "MIT", "engines": { - "node": ">= 16" + "node": ">= 0.8" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" }, "funding": { - "url": "https://github.com/sponsors/express-rate-limit" + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" }, - "peerDependencies": { - "express": "^4.11 || 5 || ^5.0.0-beta.1" + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/serve-static": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" } }, "node_modules/@supabase/auth-js": { @@ -207,6 +434,8 @@ }, "node_modules/@types/express": { "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.1.tgz", + "integrity": "sha512-UZUw8vjpWFXuDnjFTh7/5c2TWDlQqeXHi6hcN7F2XSVT5P+WmUnnbFS3KA6Jnc6IsEqI2qCVu2bK0R0J4A8ZQQ==", "dev": true, "license": "MIT", "dependencies": { @@ -217,6 +446,8 @@ }, "node_modules/@types/express-serve-static-core": { "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.6.tgz", + "integrity": "sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==", "dev": true, "license": "MIT", "dependencies": { @@ -241,12 +472,12 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "22.13.17", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.17.tgz", - "integrity": "sha512-nAJuQXoyPj04uLgu+obZcSmsfOenUg6DxPKogeUy6yNCFwWaj5sBF8/G/pNo8EtBJjAfSVgfIlugR/BCOleO+g==", + "version": "22.15.17", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.17.tgz", + "integrity": "sha512-wIX2aSZL5FE+MR0JlvF87BNVrtFWf6AE6rxSE9X7OwnVvoyCQjpzSRJ+M87se/4QCkCiebQAqrJ0y6fwIyi7nw==", "license": "MIT", "dependencies": { - "undici-types": "~6.20.0" + "undici-types": "~6.21.0" } }, "node_modules/@types/phoenix": { @@ -302,16 +533,34 @@ } }, "node_modules/accepts": { - "version": "2.0.0", + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", "license": "MIT", "dependencies": { - "mime-types": "^3.0.0", - "negotiator": "^1.0.0" + "mime-types": "~2.1.34", + "negotiator": "0.6.3" }, "engines": { "node": ">= 0.6" } }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, "node_modules/array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", @@ -326,53 +575,42 @@ "license": "MIT" }, "node_modules/body-parser": { - "version": "2.2.0", + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", "license": "MIT", "dependencies": { - "bytes": "^3.1.2", - "content-type": "^1.0.5", - "debug": "^4.4.0", - "http-errors": "^2.0.0", - "iconv-lite": "^0.6.3", - "on-finished": "^2.4.1", - "qs": "^6.14.0", - "raw-body": "^3.0.0", - "type-is": "^2.0.0" + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" }, "engines": { - "node": ">=18" - } - }, - "node_modules/body-parser/node_modules/debug": { - "version": "4.4.0", - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" } }, - "node_modules/body-parser/node_modules/ms": { - "version": "2.1.3", - "license": "MIT" - }, - "node_modules/body-parser/node_modules/qs": { - "version": "6.14.0", - "license": "BSD-3-Clause", + "node_modules/body-parser/node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "license": "MIT", "dependencies": { - "side-channel": "^1.1.0" + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" }, "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">= 0.8" } }, "node_modules/brace-expansion": { @@ -432,7 +670,9 @@ "license": "MIT" }, "node_modules/content-disposition": { - "version": "1.0.0", + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", "license": "MIT", "dependencies": { "safe-buffer": "5.2.1" @@ -460,11 +700,10 @@ } }, "node_modules/cookie-signature": { - "version": "1.2.2", - "license": "MIT", - "engines": { - "node": ">=6.6.0" - } + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", + "license": "MIT" }, "node_modules/cors": { "version": "2.8.5", @@ -495,6 +734,8 @@ }, "node_modules/data-uri-to-buffer": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", "dev": true, "license": "MIT", "engines": { @@ -502,18 +743,12 @@ } }, "node_modules/debug": { - "version": "4.3.6", + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "license": "MIT", "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } + "ms": "2.0.0" } }, "node_modules/depd": { @@ -535,6 +770,18 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -619,299 +866,87 @@ "node": ">=12.0.0" } }, - "node_modules/eventsource-parser": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.1.tgz", - "integrity": "sha512-VARTJ9CYeuQYb0pZEPbzi740OWFgpHe7AYJ2WFZVnUDUQp5Dk2yJUgF36YsZ81cOyxT0QxmXD2EQpapAouzWVA==", - "license": "MIT", - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/express": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", - "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", - "license": "MIT", - "dependencies": { - "accepts": "~1.3.8", - "array-flatten": "1.1.1", - "body-parser": "1.20.3", - "content-disposition": "0.5.4", - "content-type": "~1.0.4", - "cookie": "0.7.1", - "cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "2.0.0", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "finalhandler": "1.3.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "merge-descriptors": "1.0.3", - "methods": "~1.1.2", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "path-to-regexp": "0.1.12", - "proxy-addr": "~2.0.7", - "qs": "6.13.0", - "range-parser": "~1.2.1", - "safe-buffer": "5.2.1", - "send": "0.19.0", - "serve-static": "1.16.2", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "type-is": "~1.6.18", - "utils-merge": "1.0.1", - "vary": "~1.1.2" - }, - "engines": { - "node": ">= 0.10.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/express/node_modules/accepts": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", - "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", - "license": "MIT", - "dependencies": { - "mime-types": "~2.1.34", - "negotiator": "0.6.3" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/express/node_modules/body-parser": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", - "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "content-type": "~1.0.5", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.13.0", - "raw-body": "2.5.2", - "type-is": "~1.6.18", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/express/node_modules/content-disposition": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", - "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", - "license": "MIT", - "dependencies": { - "safe-buffer": "5.2.1" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/express/node_modules/cookie-signature": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", - "license": "MIT" - }, - "node_modules/express/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/express/node_modules/finalhandler": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", - "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", - "license": "MIT", - "dependencies": { - "debug": "2.6.9", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "statuses": "2.0.1", - "unpipe": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/express/node_modules/fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/express/node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/express/node_modules/media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/express/node_modules/merge-descriptors": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", - "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/express/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/express/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/express/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" - }, - "node_modules/express/node_modules/negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/express/node_modules/raw-body": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", - "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "node_modules/eventsource-parser": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.1.tgz", + "integrity": "sha512-VARTJ9CYeuQYb0pZEPbzi740OWFgpHe7AYJ2WFZVnUDUQp5Dk2yJUgF36YsZ81cOyxT0QxmXD2EQpapAouzWVA==", "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - }, "engines": { - "node": ">= 0.8" + "node": ">=18.0.0" } }, - "node_modules/express/node_modules/send": { - "version": "0.19.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", - "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "node_modules/express": { + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", "license": "MIT", "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.7.1", + "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", - "destroy": "1.2.0", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", + "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", - "mime": "1.6.0", - "ms": "2.1.3", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.12", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", "range-parser": "~1.2.1", - "statuses": "2.0.1" + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" }, "engines": { - "node": ">= 0.8.0" + "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, - "node_modules/express/node_modules/send/node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "node_modules/express-rate-limit": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.0.tgz", + "integrity": "sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg==", "license": "MIT", "engines": { - "node": ">= 0.8" + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": "^4.11 || 5 || ^5.0.0-beta.1" } }, - "node_modules/express/node_modules/send/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", "license": "MIT" }, - "node_modules/express/node_modules/serve-static": { - "version": "1.16.2", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", - "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", - "license": "MIT", - "dependencies": { - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "0.19.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/express/node_modules/type-is": { - "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", - "license": "MIT", - "dependencies": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - }, - "engines": { - "node": ">= 0.6" - } + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" }, "node_modules/fetch-blob": { "version": "3.2.0", @@ -938,39 +973,23 @@ } }, "node_modules/finalhandler": { - "version": "2.1.0", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", "license": "MIT", "dependencies": { - "debug": "^4.4.0", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "on-finished": "^2.4.1", - "parseurl": "^1.3.3", - "statuses": "^2.0.1" + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" }, "engines": { "node": ">= 0.8" } }, - "node_modules/finalhandler/node_modules/debug": { - "version": "4.4.0", - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/finalhandler/node_modules/ms": { - "version": "2.1.3", - "license": "MIT" - }, "node_modules/formdata-polyfill": { "version": "4.0.10", "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", @@ -994,10 +1013,12 @@ } }, "node_modules/fresh": { - "version": "2.0.0", + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", "license": "MIT", "engines": { - "node": ">= 0.8" + "node": ">= 0.6" } }, "node_modules/fs.realpath": { @@ -1128,10 +1149,12 @@ } }, "node_modules/iconv-lite": { - "version": "0.6.3", + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", "license": "MIT", "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" + "safer-buffer": ">= 2.1.2 < 3" }, "engines": { "node": ">=0.10.0" @@ -1202,6 +1225,12 @@ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "license": "ISC" }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -1212,18 +1241,19 @@ } }, "node_modules/media-typer": { - "version": "1.1.0", + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", "license": "MIT", "engines": { - "node": ">= 0.8" + "node": ">= 0.6" } }, "node_modules/merge-descriptors": { - "version": "2.0.0", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", "license": "MIT", - "engines": { - "node": ">=18" - }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } @@ -1250,17 +1280,21 @@ } }, "node_modules/mime-db": { - "version": "1.54.0", + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/mime-types": { - "version": "3.0.1", + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "license": "MIT", "dependencies": { - "mime-db": "^1.54.0" + "mime-db": "1.52.0" }, "engines": { "node": ">= 0.6" @@ -1290,11 +1324,15 @@ } }, "node_modules/ms": { - "version": "2.1.2", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "license": "MIT" }, "node_modules/negotiator": { - "version": "1.0.0", + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", "license": "MIT", "engines": { "node": ">= 0.6" @@ -1304,6 +1342,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "deprecated": "Use your platform's native DOMException instead", "dev": true, "funding": [ { @@ -1322,6 +1361,8 @@ }, "node_modules/node-fetch": { "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", "dev": true, "license": "MIT", "dependencies": { @@ -1421,9 +1462,9 @@ "license": "MIT" }, "node_modules/pkce-challenge": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-4.1.0.tgz", - "integrity": "sha512-ZBmhE1C9LcPoH9XZSdwiPtbPHZROwAnMy+kIFQVrnMCxY4Cudlz3gBOpzilgc0jOgRaiT3sIWfpMomW2ar2orQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", "license": "MIT", "engines": { "node": ">=16.20.0" @@ -1442,6 +1483,15 @@ "node": ">= 0.10" } }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/qs": { "version": "6.13.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", @@ -1481,6 +1531,18 @@ "node": ">= 0.8" } }, + "node_modules/raw-body/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/rechoir": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", @@ -1589,40 +1651,57 @@ "license": "MIT" }, "node_modules/send": { - "version": "1.2.0", + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", "license": "MIT", "dependencies": { - "debug": "^4.3.5", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "etag": "^1.8.1", - "fresh": "^2.0.0", - "http-errors": "^2.0.0", - "mime-types": "^3.0.1", - "ms": "^2.1.3", - "on-finished": "^2.4.1", - "range-parser": "^1.2.1", - "statuses": "^2.0.1" + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" }, "engines": { - "node": ">= 18" + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "license": "MIT", + "engines": { + "node": ">= 0.8" } }, "node_modules/send/node_modules/ms": { "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "license": "MIT" }, "node_modules/serve-static": { - "version": "2.2.0", + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", "license": "MIT", "dependencies": { - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "parseurl": "^1.3.3", - "send": "^1.2.0" + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" }, "engines": { - "node": ">= 18" + "node": ">= 0.8.0" } }, "node_modules/setprototypeof": { @@ -1797,21 +1876,22 @@ "license": "MIT" }, "node_modules/type-is": { - "version": "2.0.1", + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", "license": "MIT", "dependencies": { - "content-type": "^1.0.5", - "media-typer": "^1.1.0", - "mime-types": "^3.0.0" + "media-typer": "0.3.0", + "mime-types": "~2.1.24" }, "engines": { "node": ">= 0.6" } }, "node_modules/typescript": { - "version": "5.8.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.2.tgz", - "integrity": "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==", + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", "dev": true, "license": "Apache-2.0", "bin": { @@ -1823,9 +1903,9 @@ } }, "node_modules/undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", - "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", "license": "MIT" }, "node_modules/unpipe": { @@ -1837,6 +1917,15 @@ "node": ">= 0.8" } }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -1903,9 +1992,9 @@ "license": "ISC" }, "node_modules/ws": { - "version": "8.18.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.1.tgz", - "integrity": "sha512-RKW2aJZMXeMxVpnZ6bck+RswznaxmzdULiBr6KY7XkTnW8uvt0iT9H5DkHUChXrc+uurzwa0rVI16n/Xzjdz1w==", + "version": "8.18.2", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.2.tgz", + "integrity": "sha512-DMricUmwGZUVr++AEAe2uiVM7UoO9MAVZMDu05UQOaUII0lp+zOzLLU4Xqh/JvTqklB1T4uELaaPBKyjE1r4fQ==", "license": "MIT", "engines": { "node": ">=10.0.0" @@ -1924,7 +2013,9 @@ } }, "node_modules/zod": { - "version": "3.24.2", + "version": "3.24.4", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.4.tgz", + "integrity": "sha512-OdqJE9UDRPwWsrHjLN2F8bPxvwJBK22EHLWtanu0LSYr5YqzsaaW3RMgmjwr8Rypg5k+meEJdSPXJZXE/yqOMg==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/colinhacks" @@ -1932,6 +2023,8 @@ }, "node_modules/zod-to-json-schema": { "version": "3.24.5", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz", + "integrity": "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==", "license": "ISC", "peerDependencies": { "zod": "^3.24.1" diff --git a/mcp_servers/slack/package.json b/mcp_servers/shopify/package.json similarity index 70% rename from mcp_servers/slack/package.json rename to mcp_servers/shopify/package.json index 14a59e1a..883a7efe 100644 --- a/mcp_servers/slack/package.json +++ b/mcp_servers/shopify/package.json @@ -1,12 +1,12 @@ { - "name": "@klavis-ai/slack-mcp-server", - "version": "0.6.2", - "description": "MCP server for interacting with Slack", + "name": "@klavis-ai/shopify-mcp-server", + "version": "0.1.0", + "description": "MCP server for interacting with Shopify", "license": "MIT", "homepage": "https://www.klavis.ai/", "type": "module", "bin": { - "mcp-server-slack": "dist/index.js" + "mcp-server-shopify": "dist/index.js" }, "files": [ "dist" @@ -19,12 +19,13 @@ "test": "node test-sse.js" }, "dependencies": { - "@modelcontextprotocol/sdk": "^1.8.0", + "@modelcontextprotocol/sdk": "^1.12.1", "@types/node": "^22", "express": "^4.21.2", "@supabase/supabase-js": "^2.49.1", "zod": "^3.22.4", - "zod-to-json-schema": "^3.23.5" + "zod-to-json-schema": "^3.23.5", + "dotenv": "^16.3.1" }, "devDependencies": { "@types/express": "^5.0.0", diff --git a/mcp_servers/shopify/tools.ts b/mcp_servers/shopify/tools.ts new file mode 100644 index 00000000..bf083672 --- /dev/null +++ b/mcp_servers/shopify/tools.ts @@ -0,0 +1,331 @@ +import { Tool } from "@modelcontextprotocol/sdk/types.js"; + +export const listProductsTool: Tool = { + name: "shopify_list_products", + description: "List products in the Shopify store with pagination", + inputSchema: { + type: "object", + properties: { + limit: { + type: "number", + description: "Maximum number of products to return (default 50, max 250)", + default: 50, + }, + cursor: { + type: "string", + description: "Pagination cursor for next page of results", + }, + collection_id: { + type: "string", + description: "Filter products by collection ID", + }, + }, + }, + annotations: { + category: "SHOPIFY_PRODUCT", + readOnlyHint: true, + }, +}; + +export const getProductTool: Tool = { + name: "shopify_get_product", + description: "Get detailed information about a specific product", + inputSchema: { + type: "object", + properties: { + product_id: { + type: "string", + description: "The ID of the product to retrieve", + }, + }, + required: ["product_id"], + }, + annotations: { + category: "SHOPIFY_PRODUCT", + readOnlyHint: true, + }, +}; + +export const createProductTool: Tool = { + name: "shopify_create_product", + description: "Create a new product in the Shopify store", + inputSchema: { + type: "object", + properties: { + title: { + type: "string", + description: "The title of the product", + }, + body_html: { + type: "string", + description: "The HTML description of the product", + }, + vendor: { + type: "string", + description: "The name of the product vendor", + }, + product_type: { + type: "string", + description: "The type of product", + }, + tags: { + type: "string", + description: "Comma-separated list of tags", + }, + status: { + type: "string", + description: "Product status (active, draft, archived)", + enum: ["active", "draft", "archived"], + }, + variants: { + type: "array", + description: "Product variants", + items: { + type: "object", + properties: { + price: { + type: "string", + description: "Variant price (e.g., '29.99')", + }, + sku: { + type: "string", + description: "Stock keeping unit", + }, + inventory_quantity: { + type: "number", + description: "Inventory quantity", + }, + option1: { + type: "string", + description: "First option (e.g., 'Blue')", + }, + option2: { + type: "string", + description: "Second option (e.g., 'Small')", + }, + option3: { + type: "string", + description: "Third option", + }, + }, + required: ["price"], + }, + }, + }, + required: ["title"], + }, + annotations: { + category: "SHOPIFY_PRODUCT", + }, +}; + +export const updateProductTool: Tool = { + name: "shopify_update_product", + description: "Update an existing product in the Shopify store", + inputSchema: { + type: "object", + properties: { + product_id: { + type: "string", + description: "The ID of the product to update", + }, + title: { + type: "string", + description: "The title of the product", + }, + body_html: { + type: "string", + description: "The HTML description of the product", + }, + vendor: { + type: "string", + description: "The name of the product vendor", + }, + product_type: { + type: "string", + description: "The type of product", + }, + tags: { + type: "string", + description: "Comma-separated list of tags", + }, + status: { + type: "string", + description: "Product status (active, draft, archived)", + enum: ["active", "draft", "archived"], + }, + }, + required: ["product_id"], + }, + annotations: { + category: "SHOPIFY_PRODUCT", + }, +}; + +export const listOrdersTool: Tool = { + name: "shopify_list_orders", + description: "List orders in the Shopify store with pagination", + inputSchema: { + type: "object", + properties: { + limit: { + type: "number", + description: "Maximum number of orders to return (default 50, max 250)", + default: 50, + }, + status: { + type: "string", + description: "Filter by order status (open, closed, cancelled, any)", + enum: ["open", "closed", "cancelled", "any"], + default: "any", + }, + cursor: { + type: "string", + description: "Pagination cursor for next page of results", + }, + }, + }, + annotations: { + category: "SHOPIFY_ORDER", + readOnlyHint: true, + }, +}; + +export const getOrderTool: Tool = { + name: "shopify_get_order", + description: "Get detailed information about a specific order", + inputSchema: { + type: "object", + properties: { + order_id: { + type: "string", + description: "The ID of the order to retrieve", + }, + }, + required: ["order_id"], + }, + annotations: { + category: "SHOPIFY_ORDER", + readOnlyHint: true, + }, +}; + +export const createOrderTool: Tool = { + name: "shopify_create_order", + description: "Create a new order in the Shopify store", + inputSchema: { + type: "object", + properties: { + customer: { + type: "object", + description: "Customer information", + properties: { + email: { + type: "string", + description: "Customer email", + }, + first_name: { + type: "string", + description: "Customer first name", + }, + last_name: { + type: "string", + description: "Customer last name", + }, + }, + required: ["email"], + }, + line_items: { + type: "array", + description: "Products to include in the order", + items: { + type: "object", + properties: { + variant_id: { + type: "number", + description: "The product variant ID", + }, + quantity: { + type: "number", + description: "Quantity of the product", + }, + }, + required: ["variant_id", "quantity"], + }, + }, + shipping_address: { + type: "object", + description: "Shipping address", + properties: { + address1: { + type: "string", + description: "Address line 1", + }, + city: { + type: "string", + description: "City", + }, + province: { + type: "string", + description: "Province or state", + }, + country: { + type: "string", + description: "Country", + }, + zip: { + type: "string", + description: "Zip or postal code", + }, + }, + required: ["address1", "city", "country", "zip"], + }, + }, + required: ["line_items"], + }, + annotations: { + category: "SHOPIFY_ORDER", + }, +}; + +export const listCustomersTool: Tool = { + name: "shopify_list_customers", + description: "List customers in the Shopify store with pagination", + inputSchema: { + type: "object", + properties: { + limit: { + type: "number", + description: "Maximum number of customers to return (default 50, max 250)", + default: 50, + }, + cursor: { + type: "string", + description: "Pagination cursor for next page of results", + }, + }, + }, + annotations: { + category: "SHOPIFY_CUSTOMER", + readOnlyHint: true, + }, +}; + +export const getCustomerTool: Tool = { + name: "shopify_get_customer", + description: "Get detailed information about a specific customer", + inputSchema: { + type: "object", + properties: { + customer_id: { + type: "string", + description: "The ID of the customer to retrieve", + }, + }, + required: ["customer_id"], + }, + annotations: { + category: "SHOPIFY_CUSTOMER", + readOnlyHint: true, + }, +}; diff --git a/mcp_servers/shopify/tsconfig.json b/mcp_servers/shopify/tsconfig.json new file mode 100644 index 00000000..f098b87e --- /dev/null +++ b/mcp_servers/shopify/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "outDir": "./dist", + "rootDir": ".", + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true + }, + "include": ["./**/*.ts"] +} diff --git a/mcp_servers/shopify/types.ts b/mcp_servers/shopify/types.ts new file mode 100644 index 00000000..51a44881 --- /dev/null +++ b/mcp_servers/shopify/types.ts @@ -0,0 +1,109 @@ +export interface ListProductsArgs { + limit?: number; + cursor?: string; + collection_id?: string; +} + +export interface GetProductArgs { + product_id: string; +} + +export interface ProductVariant { + price: string; + sku?: string; + inventory_quantity?: number; + option1?: string; + option2?: string; + option3?: string; +} + +export interface CreateProductArgs { + title: string; + body_html?: string; + vendor?: string; + product_type?: string; + tags?: string; + status?: string; + variants?: ProductVariant[]; +} + +export interface UpdateProductArgs { + product_id: string; + title?: string; + body_html?: string; + vendor?: string; + product_type?: string; + tags?: string; + status?: string; +} + +export interface ListOrdersArgs { + limit?: number; + status?: string; + cursor?: string; +} + +export interface GetOrderArgs { + order_id: string; +} + +export interface OrderLineItem { + variant_id: number; + quantity: number; +} + +export interface OrderCustomer { + email: string; + first_name?: string; + last_name?: string; +} + +export interface ShippingAddress { + address1: string; + city: string; + province?: string; + country: string; + zip: string; +} + +export interface CreateOrderArgs { + customer?: OrderCustomer; + line_items: OrderLineItem[]; + shipping_address?: ShippingAddress; +} + +export interface ListCustomersArgs { + limit?: number; + cursor?: string; +} + +export interface GetCustomerArgs { + customer_id: string; +} + +export interface ShopifyCredentials { + accessToken?: string; + shopDomain?: string; +} + +export interface AsyncLocalStorageState { + shopify_access_token: string; + shopify_shop_domain: string; +} + +export interface ApiHeaders { + [key: string]: string; +} + +export interface ApiErrorResponse { + errors?: unknown; + [key: string]: unknown; +} + +type ContentItem = { + type: string; + text: string; +} + +export type OrderStatus = 'open' | 'closed' | 'cancelled' | 'any'; +export type ProductStatus = 'active' | 'draft' | 'archived'; diff --git a/mcp_servers/slack/.env.example b/mcp_servers/slack/.env.example index af16a38f..f595771f 100644 --- a/mcp_servers/slack/.env.example +++ b/mcp_servers/slack/.env.example @@ -1,5 +1,14 @@ -# Slack Authentication Token -SLACK_AUTH_TOKEN=xoxb-your-token-here +# Slack MCP Server Configuration -# Slack Team ID (already used in the code) -SLACK_TEAM_ID=your-team-id-here \ No newline at end of file +# Server Configuration +SLACK_MCP_SERVER_PORT=5000 + +# For local development - Bot Token (xoxb-...) +# This token is used for general workspace operations +SLACK_BOT_TOKEN=xoxb-your-bot-token-here + +# For local development - User Token (xoxp-...) +# This token is used for user-specific operations (DMs, private channels, user status) +SLACK_USER_TOKEN=xoxp-your-user-token-here + +# Note: if Using Klavis Cloud, we'll handle all these ^ from Klavis OAuth proxy. diff --git a/mcp_servers/slack/Dockerfile b/mcp_servers/slack/Dockerfile index a790ad7b..8d8ee223 100644 --- a/mcp_servers/slack/Dockerfile +++ b/mcp_servers/slack/Dockerfile @@ -1,25 +1,22 @@ -FROM node:22-alpine AS builder - -COPY mcp_servers/slack /app -COPY mcp_servers/tsconfig.json /tsconfig.json +FROM python:3.12-slim WORKDIR /app -RUN --mount=type=cache,target=/root/.npm npm install -RUN npm run build - -FROM node:22-alpine AS release +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* -COPY --from=builder /app/dist /app/dist -COPY --from=builder /app/package.json /app/package.json -COPY --from=builder /app/package-lock.json /app/package-lock.json +# Copy only the requirements first to leverage Docker cache +COPY mcp_servers/slack/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt -ENV NODE_ENV=production +COPY mcp_servers/slack/server.py . +COPY mcp_servers/slack/bot_tools/ ./bot_tools/ +COPY mcp_servers/slack/user_tools/ ./user_tools/ +# Expose the port the server runs on EXPOSE 5000 -WORKDIR /app - -RUN npm ci --ignore-scripts --omit-dev - -ENTRYPOINT ["node", "dist/index.js"] +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/slack/README.md b/mcp_servers/slack/README.md index f0116628..11fd2bea 100644 --- a/mcp_servers/slack/README.md +++ b/mcp_servers/slack/README.md @@ -1,119 +1,85 @@ # Slack MCP Server -[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) - -This server implements the Model Context Protocol (MCP) to provide access to various Slack API functionalities as tools for language models or other MCP clients. It allows interacting with Slack workspaces programmatically through a standardized interface. - -This server is based on the reference implementation from [modelcontextprotocol/servers](https://github.com/modelcontextprotocol/servers/tree/main/src/slack). - -## Features - -The server exposes the following Slack API functions as MCP tools: - -* `slack_list_channels`: List public channels in the workspace. -* `slack_post_message`: Post a new message to a channel. -* `slack_reply_to_thread`: Reply to a specific message thread. -* `slack_add_reaction`: Add a reaction emoji to a message. -* `slack_get_channel_history`: Get recent messages from a channel. -* `slack_get_thread_replies`: Get all replies in a message thread. -* `slack_get_users`: Get a list of users in the workspace. -* `slack_get_user_profile`: Get detailed profile information for a user. - -## Prerequisites - -Before you begin, ensure you have the following: - -* **Node.js and npm:** Required for local development (check versions with `node -v` and `npm -v`). -* **Docker:** Required for running the server in a container (Recommended). -* **Slack Bot Token:** A Slack Bot token with the necessary permissions (scopes) to perform the actions listed in the Features section (e.g., `channels:read`, `chat:write`, `reactions:write`, `groups:read`, `users:read`, `users:read.email`). You can create a Slack App and obtain a Bot User OAuth Token from the "OAuth & Permissions" page in your Slack App settings. -* **Slack Team ID:** The ID of your Slack workspace (starts with `T`). You can often find this in URLs or by using Slack API methods. - -## Setup - -You can run the server using Docker (recommended) or locally. - -### Docker (Recommended) - -1. **Create Environment File:** - Create a file named `.env` in the `mcp_servers/slack` directory with the following content: - ```env - # Required: Your Slack Workspace/Team ID - SLACK_TEAM_ID=TXXXXXXXXXX - - # Optional: Your Slack Bot OAuth Token - # If provided, this takes precedence over the x-auth-token header - SLACK_AUTH_TOKEN=xoxb-your-token-here - ``` - Replace `TXXXXXXXXXX` with your actual Slack Team ID. If `SLACK_AUTH_TOKEN` is not set, - the Slack Bot Token must be provided via the `x-auth-token` header with each request (see Configuration). - -2. **Build Docker Image:** - Navigate to the root `klavis` directory (one level above `mcp_servers`) in your terminal and run the build command: - ```bash - docker build -t slack-mcp-server -f mcp_servers/slack/Dockerfile . - ``` - *(Make sure the path to the Dockerfile is correct relative to your current directory.)* - -3. **Run Docker Container:** - Run the container, mapping the server's port (5000) to a port on your host machine (e.g., 5000): - ```bash - # Note: The .env file created in step 1 is copied into the image during the build process specified in the Dockerfile. - docker run -p 5000:5000 --name slack-mcp slack-mcp-server - ``` - The server will start and listen on port 5000 inside the container. - -### Local Development - -1. **Clone Repository:** (If you haven't already) - ```bash - # git clone - # cd - ``` - -2. **Navigate to Directory:** - ```bash - cd mcp_servers/slack - ``` - -3. **Create Environment File:** - Create a file named `.env` in this directory as described in Step 1 of the Docker setup: - ```env - # Required: Your Slack Workspace/Team ID - SLACK_TEAM_ID=TXXXXXXXXXX - - # Optional: Your Slack Bot OAuth Token - # If provided, this takes precedence over the x-auth-token header - SLACK_AUTH_TOKEN=xoxb-your-token-here - ``` - -4. **Install Dependencies:** - ```bash - npm install - ``` - -5. **Build and Run:** - This command compiles the TypeScript code and starts the server: - ```bash - npm start - ``` - The server will start and listen on `http://localhost:5000`. - -## Configuration - -* **`SLACK_TEAM_ID` (Environment Variable):** This is required for certain API calls (`getChannels`, `getUsers`) and must be set in the `.env` file (both for Docker build and local run). -* **`SLACK_AUTH_TOKEN` (Environment Variable):** Optional. If set, this Bot Token will be used for all Slack API calls. This takes precedence over the token provided in request headers. -* **Slack Bot Token (Request Header):** If `SLACK_AUTH_TOKEN` is not set, the server expects the Slack Bot Token to be provided in the `x-auth-token` HTTP header for every request made to the `/messages` endpoint. The server uses this token to authenticate with the Slack API for the requested operation. - -## Usage - -MCP clients can connect to this server via Server-Sent Events (SSE) and interact with it: - -1. **Establish SSE Connection:** Clients connect to the `/sse` endpoint (e.g., `http://localhost:5000/sse`). -2. **Send Messages:** Clients send MCP requests (like `call_tool`) as JSON payloads via POST requests to the `/messages?sessionId=` endpoint. -3. **Authentication:** Each POST request to `/messages` **must** include the Slack Bot Token in the `x-auth-token` header. - -Refer to the [MCP SDK documentation](https://github.com/modelcontextprotocol) for details on client implementation. - -## License - -This project is licensed under the MIT License - see the [LICENSE](LICENSE) file (you might need to add one) for details. \ No newline at end of file +A Model Context Protocol (MCP) server for Slack integration. Send messages, manage channels, search conversations, and handle workspace operations using both bot and user tokens. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Slack with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("SLACK", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/slack-mcp-server:latest + + +# Run Slack MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/slack-mcp-server:latest + +# Run Slack MCP Server (no OAuth support) +docker run -p 5000:5000 \ + -e SLACK_BOT_TOKEN=xoxb-your-bot-token \ + -e SLACK_USER_TOKEN=xoxp-your-user-token \ + ghcr.io/klavis-ai/slack-mcp-server:latest +``` + +**OAuth Setup:** For OAuth authentication (recommended), use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys). This handles the complex OAuth flow automatically. + +**Manual Setup:** Alternatively, provide your Slack bot and user tokens directly. + +## šŸ› ļø Available Tools + +### User Tools (User Token) +- **Channel Management**: List channels, get channel history +- **Messaging**: Post messages, reply to threads, add reactions as user +- **User Management**: List users, get user information +- **Search**: Search messages with user permissions + +### Bot Tools (Bot Token) +- **Bot Messaging**: Send messages, reply to threads, add reactions as bot +- **Workspace Operations**: Bot-specific channel and user operations + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/slack/bot_tools/__init__.py b/mcp_servers/slack/bot_tools/__init__.py new file mode 100644 index 00000000..66f30ee2 --- /dev/null +++ b/mcp_servers/slack/bot_tools/__init__.py @@ -0,0 +1,12 @@ +from .bot_messages import bot_post_message, bot_reply_to_thread, bot_add_reaction +from .base import bot_token_context + +__all__ = [ + # Bot Messages + "bot_post_message", + "bot_reply_to_thread", + "bot_add_reaction", + + # Base + "bot_token_context", +] diff --git a/mcp_servers/slack/bot_tools/base.py b/mcp_servers/slack/bot_tools/base.py new file mode 100644 index 00000000..a815d783 --- /dev/null +++ b/mcp_servers/slack/bot_tools/base.py @@ -0,0 +1,93 @@ +import logging +from typing import Any, Dict, Optional +from contextvars import ContextVar +import httpx + +# Configure logging +logger = logging.getLogger(__name__) + +SLACK_API_ENDPOINT = "https://slack.com/api" + +# Context variable to store the bot token for each request +bot_token_context: ContextVar[str] = ContextVar('bot_token') + +def get_bot_token() -> str: + """Get the bot authentication token from context.""" + try: + return bot_token_context.get() + except LookupError: + raise RuntimeError("Bot authentication token not found in request context") + +class SlackBotClient: + """Client for Slack API using Bot Bearer Authentication.""" + + @staticmethod + async def make_request( + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + """Make an HTTP request to Slack API using bot token.""" + api_token = get_bot_token() + + if not api_token: + raise RuntimeError("No bot API token provided. Please set the authentication header.") + + # Slack uses Bearer Authentication + headers = { + "Authorization": f"Bearer {api_token}", + "Content-Type": "application/json; charset=utf-8" + } + + url = f"{SLACK_API_ENDPOINT}/{endpoint}" + + async with httpx.AsyncClient() as client: + if method.upper() == "GET": + response = await client.get(url, headers=headers, params=params) + elif method.upper() == "POST": + response = await client.post(url, headers=headers, json=data) + elif method.upper() == "PUT": + response = await client.put(url, headers=headers, json=data) + elif method.upper() == "DELETE": + response = await client.delete(url, headers=headers) + else: + raise ValueError(f"Unsupported HTTP method: {method}") + + # Check HTTP status + response.raise_for_status() + + # Handle empty responses + if response.status_code == 204 or not response.content: + return {"ok": True} + + try: + json_response = response.json() + + # Check for Slack API errors + if not json_response.get("ok", False): + error_msg = json_response.get("error", "Unknown Slack API error") + logger.error(f"Slack API error: {error_msg}") + raise SlackAPIError(error_msg, json_response) + + return json_response + except ValueError as e: + # Handle cases where response content exists but isn't valid JSON + logger.error(f"Failed to parse JSON response: {e}") + logger.error(f"Response content: {response.content}") + return {"error": "Invalid JSON response", "content": response.text} + +async def make_slack_bot_request( + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """Make an HTTP request to Slack API using bot token.""" + return await SlackBotClient.make_request(method, endpoint, data, params) + +class SlackAPIError(Exception): + """Custom exception for Slack API errors.""" + def __init__(self, message: str, response: Optional[Dict[str, Any]] = None): + super().__init__(message) + self.response = response diff --git a/mcp_servers/slack/bot_tools/bot_messages.py b/mcp_servers/slack/bot_tools/bot_messages.py new file mode 100644 index 00000000..0a21b134 --- /dev/null +++ b/mcp_servers/slack/bot_tools/bot_messages.py @@ -0,0 +1,70 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_slack_bot_request + +# Configure logging +logger = logging.getLogger(__name__) + +# Sends a message to a channel as a bot +# Bot tokens: chat:write +async def bot_post_message( + channel_id: str, + text: str +) -> Dict[str, Any]: + """Post a new message to a Slack channel using bot token.""" + logger.info(f"Executing tool: bot_post_message to channel {channel_id}") + + data = { + "channel": channel_id, + "text": text + } + + try: + return await make_slack_bot_request("POST", "chat.postMessage", data=data) + except Exception as e: + logger.exception(f"Error executing tool bot_post_message: {e}") + raise e + +# Replies to a thread in a channel as a bot +# Bot tokens: chat:write +async def bot_reply_to_thread( + channel_id: str, + thread_ts: str, + text: str +) -> Dict[str, Any]: + """Reply to a specific message thread in Slack using bot token.""" + logger.info(f"Executing tool: bot_reply_to_thread in channel {channel_id}, thread {thread_ts}") + + data = { + "channel": channel_id, + "thread_ts": thread_ts, + "text": text + } + + try: + return await make_slack_bot_request("POST", "chat.postMessage", data=data) + except Exception as e: + logger.exception(f"Error executing tool bot_reply_to_thread: {e}") + raise e + +# Adds a reaction to a message as a bot +# Bot tokens: reactions:write +async def bot_add_reaction( + channel_id: str, + timestamp: str, + reaction: str +) -> Dict[str, Any]: + """Add a reaction emoji to a message using bot token.""" + logger.info(f"Executing tool: bot_add_reaction to message {timestamp} in channel {channel_id}") + + data = { + "channel": channel_id, + "timestamp": timestamp, + "name": reaction + } + + try: + return await make_slack_bot_request("POST", "reactions.add", data=data) + except Exception as e: + logger.exception(f"Error executing tool bot_add_reaction: {e}") + raise e diff --git a/mcp_servers/slack/common/errors.ts b/mcp_servers/slack/common/errors.ts deleted file mode 100644 index 1c41c25b..00000000 --- a/mcp_servers/slack/common/errors.ts +++ /dev/null @@ -1,72 +0,0 @@ -export class SlackError extends Error { - constructor(message: string) { - super(message); - this.name = 'SlackError'; - } -} - -export class SlackValidationError extends SlackError { - response: any; - - constructor(message: string, response?: any) { - super(message); - this.name = 'SlackValidationError'; - this.response = response; - } -} - -export class SlackAuthenticationError extends SlackError { - constructor(message: string) { - super(message); - this.name = 'SlackAuthenticationError'; - } -} - -export class SlackResourceNotFoundError extends SlackError { - constructor(message: string) { - super(message); - this.name = 'SlackResourceNotFoundError'; - } -} - -export class SlackPermissionError extends SlackError { - constructor(message: string) { - super(message); - this.name = 'SlackPermissionError'; - } -} - -export class SlackRateLimitError extends SlackError { - resetAt: Date; - - constructor(message: string, resetAt: Date) { - super(message); - this.name = 'SlackRateLimitError'; - this.resetAt = resetAt; - } -} - -export function isSlackError(error: any): error is SlackError { - return error instanceof SlackError; -} - -export function formatSlackError(error: SlackError): string { - let message = `Slack API Error: ${error.message}`; - - if (error instanceof SlackValidationError) { - message = `Validation Error: ${error.message}`; - if (error.response) { - message += `\nDetails: ${JSON.stringify(error.response)}`; - } - } else if (error instanceof SlackResourceNotFoundError) { - message = `Not Found: ${error.message}`; - } else if (error instanceof SlackAuthenticationError) { - message = `Authentication Failed: ${error.message}`; - } else if (error instanceof SlackPermissionError) { - message = `Permission Denied: ${error.message}`; - } else if (error instanceof SlackRateLimitError) { - message = `Rate Limit Exceeded: ${error.message}\nResets at: ${error.resetAt.toISOString()}`; - } - - return message; -} \ No newline at end of file diff --git a/mcp_servers/slack/index.ts b/mcp_servers/slack/index.ts deleted file mode 100644 index 15401cb7..00000000 --- a/mcp_servers/slack/index.ts +++ /dev/null @@ -1,663 +0,0 @@ -#!/usr/bin/env node -import express from "express"; -import { Server } from "@modelcontextprotocol/sdk/server/index.js"; -import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js"; -import { - CallToolRequest, - CallToolRequestSchema, - ListToolsRequestSchema, - Tool, -} from "@modelcontextprotocol/sdk/types.js"; -import { z } from 'zod'; -import { isSlackError, formatSlackError } from "./common/errors.js"; -import { AsyncLocalStorage } from "async_hooks"; - - -// Type definitions for tool arguments -interface ListChannelsArgs { - limit?: number; - cursor?: string; -} - -interface PostMessageArgs { - channel_id: string; - text: string; -} - -interface ReplyToThreadArgs { - channel_id: string; - thread_ts: string; - text: string; -} - -interface AddReactionArgs { - channel_id: string; - timestamp: string; - reaction: string; -} - -interface GetChannelHistoryArgs { - channel_id: string; - limit?: number; -} - -interface GetThreadRepliesArgs { - channel_id: string; - thread_ts: string; -} - -interface GetUsersArgs { - cursor?: string; - limit?: number; -} - -interface GetUserProfileArgs { - user_id: string; -} - -// Tool definitions -const listChannelsTool: Tool = { - name: "slack_list_channels", - description: "List public channels in the workspace with pagination", - inputSchema: { - type: "object", - properties: { - limit: { - type: "number", - description: - "Maximum number of channels to return (default 100, max 200)", - default: 100, - }, - cursor: { - type: "string", - description: "Pagination cursor for next page of results", - }, - }, - }, -}; - -const postMessageTool: Tool = { - name: "slack_post_message", - description: "Post a new message to a Slack channel", - inputSchema: { - type: "object", - properties: { - channel_id: { - type: "string", - description: "The ID of the channel to post to", - }, - text: { - type: "string", - description: "The message text to post", - }, - }, - required: ["channel_id", "text"], - }, -}; - -const replyToThreadTool: Tool = { - name: "slack_reply_to_thread", - description: "Reply to a specific message thread in Slack", - inputSchema: { - type: "object", - properties: { - channel_id: { - type: "string", - description: "The ID of the channel containing the thread", - }, - thread_ts: { - type: "string", - description: "The timestamp of the parent message in the format '1234567890.123456'. Timestamps in the format without the period can be converted by adding the period such that 6 numbers come after it.", - }, - text: { - type: "string", - description: "The reply text", - }, - }, - required: ["channel_id", "thread_ts", "text"], - }, -}; - -const addReactionTool: Tool = { - name: "slack_add_reaction", - description: "Add a reaction emoji to a message", - inputSchema: { - type: "object", - properties: { - channel_id: { - type: "string", - description: "The ID of the channel containing the message", - }, - timestamp: { - type: "string", - description: "The timestamp of the message to react to", - }, - reaction: { - type: "string", - description: "The name of the emoji reaction (without ::)", - }, - }, - required: ["channel_id", "timestamp", "reaction"], - }, -}; - -const getChannelHistoryTool: Tool = { - name: "slack_get_channel_history", - description: "Get recent messages from a channel", - inputSchema: { - type: "object", - properties: { - channel_id: { - type: "string", - description: "The ID of the channel", - }, - limit: { - type: "number", - description: "Number of messages to retrieve (default 10)", - default: 10, - }, - }, - required: ["channel_id"], - }, -}; - -const getThreadRepliesTool: Tool = { - name: "slack_get_thread_replies", - description: "Get all replies in a message thread", - inputSchema: { - type: "object", - properties: { - channel_id: { - type: "string", - description: "The ID of the channel containing the thread", - }, - thread_ts: { - type: "string", - description: "The timestamp of the parent message in the format '1234567890.123456'. Timestamps in the format without the period can be converted by adding the period such that 6 numbers come after it.", - }, - }, - required: ["channel_id", "thread_ts"], - }, -}; - -const getUsersTool: Tool = { - name: "slack_get_users", - description: - "Get a list of all users in the workspace with their basic profile information", - inputSchema: { - type: "object", - properties: { - cursor: { - type: "string", - description: "Pagination cursor for next page of results", - }, - limit: { - type: "number", - description: "Maximum number of users to return (default 100, max 200)", - default: 100, - }, - }, - }, -}; - -const getUserProfileTool: Tool = { - name: "slack_get_user_profile", - description: "Get detailed profile information for a specific user", - inputSchema: { - type: "object", - properties: { - user_id: { - type: "string", - description: "The ID of the user", - }, - }, - required: ["user_id"], - }, -}; - -class SlackClient { - private botHeaders: { Authorization: string; "Content-Type": string }; - - constructor(botToken: string) { - this.botHeaders = { - Authorization: `Bearer ${botToken}`, - "Content-Type": "application/json; charset=utf-8", - }; - } - - // Update token if needed - refreshToken() { - const token = getSlackToken(); - if (token) { - this.botHeaders.Authorization = `Bearer ${token}`; - return true; - } - return false; - } - - // Update existing methods to call refreshToken before making API calls - async getChannels(limit: number = 100, cursor?: string): Promise { - this.refreshToken(); - const params = new URLSearchParams({ - types: "public_channel", - exclude_archived: "true", - limit: Math.min(limit, 200).toString(), - team_id: process.env.SLACK_TEAM_ID!, - }); - - if (cursor) { - params.append("cursor", cursor); - } - - const response = await fetch( - `https://slack.com/api/conversations.list?${params}`, - { headers: this.botHeaders }, - ); - - const data = await response.json(); - - if (!data.ok) { - throw new Error(`Slack API error: ${data.error}`); - } - - return data; - } - - async postMessage(channel_id: string, text: string): Promise { - this.refreshToken(); - const response = await fetch("https://github.com/service/https://slack.com/api/chat.postMessage", { - method: "POST", - headers: this.botHeaders, - body: JSON.stringify({ - channel: channel_id, - text: text, - }), - }); - - const data = await response.json(); - - if (!data.ok) { - throw new Error(`Slack API error: ${data.error}`); - } - - return data; - } - - async postReply( - channel_id: string, - thread_ts: string, - text: string, - ): Promise { - this.refreshToken(); - const response = await fetch("https://github.com/service/https://slack.com/api/chat.postMessage", { - method: "POST", - headers: this.botHeaders, - body: JSON.stringify({ - channel: channel_id, - thread_ts: thread_ts, - text: text, - }), - }); - - const data = await response.json(); - - if (!data.ok) { - throw new Error(`Slack API error: ${data.error}`); - } - - return data; - } - - async addReaction( - channel_id: string, - timestamp: string, - reaction: string, - ): Promise { - this.refreshToken(); - const response = await fetch("https://github.com/service/https://slack.com/api/reactions.add", { - method: "POST", - headers: this.botHeaders, - body: JSON.stringify({ - channel: channel_id, - timestamp: timestamp, - name: reaction, - }), - }); - - const data = await response.json(); - - if (!data.ok) { - throw new Error(`Slack API error: ${data.error}`); - } - - return data; - } - - async getChannelHistory( - channel_id: string, - limit: number = 10, - ): Promise { - this.refreshToken(); - const params = new URLSearchParams({ - channel: channel_id, - limit: limit.toString(), - }); - - const response = await fetch( - `https://slack.com/api/conversations.history?${params}`, - { headers: this.botHeaders }, - ); - - const data = await response.json(); - - if (!data.ok) { - throw new Error(`Slack API error: ${data.error}`); - } - - return data; - } - - async getThreadReplies(channel_id: string, thread_ts: string): Promise { - this.refreshToken(); - const params = new URLSearchParams({ - channel: channel_id, - ts: thread_ts, - }); - - const response = await fetch( - `https://slack.com/api/conversations.replies?${params}`, - { headers: this.botHeaders }, - ); - - const data = await response.json(); - - if (!data.ok) { - throw new Error(`Slack API error: ${data.error}`); - } - - return data; - } - - async getUsers(limit: number = 100, cursor?: string): Promise { - this.refreshToken(); - const params = new URLSearchParams({ - limit: Math.min(limit, 200).toString(), - team_id: process.env.SLACK_TEAM_ID!, - }); - - if (cursor) { - params.append("cursor", cursor); - } - - const response = await fetch(`https://slack.com/api/users.list?${params}`, { - headers: this.botHeaders, - }); - - const data = await response.json(); - - if (!data.ok) { - throw new Error(`Slack API error: ${data.error}`); - } - - return data; - } - - async getUserProfile(user_id: string): Promise { - this.refreshToken(); - const params = new URLSearchParams({ - user: user_id, - include_labels: "true", - }); - - const response = await fetch( - `https://slack.com/api/users.profile.get?${params}`, - { headers: this.botHeaders }, - ); - - const data = await response.json(); - - if (!data.ok) { - throw new Error(`Slack API error: ${data.error}`); - } - - return data; - } -} - -const server = new Server( - { - name: "slack-mcp-server", - version: "0.1.0", - }, - { - capabilities: { - tools: {}, - }, - } -); - -server.setRequestHandler( - ListToolsRequestSchema, - async () => { - return { - tools: [ - listChannelsTool, - postMessageTool, - replyToThreadTool, - addReactionTool, - getChannelHistoryTool, - getThreadRepliesTool, - getUsersTool, - getUserProfileTool, - ], - }; - } -); - -server.setRequestHandler( - CallToolRequestSchema, - async (request: CallToolRequest) => { - try { - // Validate the request parameters - if (!request.params?.name) { - throw new Error("Missing tool name"); - } - - const slackToken = getSlackToken(); - if (!slackToken) { - throw new Error("No valid Slack token found for this instance"); - } - - const slackClient = new SlackClient(slackToken); - - // Process the tool call based on the tool name - switch (request.params.name) { - case "slack_list_channels": { - const args = request.params.arguments as unknown as ListChannelsArgs; - const response = await slackClient.getChannels( - args.limit, - args.cursor, - ); - return { - content: [{ type: "text", text: JSON.stringify(response) }], - }; - } - - case "slack_post_message": { - const args = request.params.arguments as unknown as PostMessageArgs; - if (!args.channel_id || !args.text) { - throw new Error( - "Missing required arguments: channel_id and text", - ); - } - const response = await slackClient.postMessage( - args.channel_id, - args.text, - ); - return { - content: [{ type: "text", text: JSON.stringify(response) }], - }; - } - - case "slack_reply_to_thread": { - const args = request.params.arguments as unknown as ReplyToThreadArgs; - if (!args.channel_id || !args.thread_ts || !args.text) { - throw new Error( - "Missing required arguments: channel_id, thread_ts, and text", - ); - } - const response = await slackClient.postReply( - args.channel_id, - args.thread_ts, - args.text, - ); - return { - content: [{ type: "text", text: JSON.stringify(response) }], - }; - } - - case "slack_add_reaction": { - const args = request.params.arguments as unknown as AddReactionArgs; - if (!args.channel_id || !args.timestamp || !args.reaction) { - throw new Error( - "Missing required arguments: channel_id, timestamp, and reaction", - ); - } - const response = await slackClient.addReaction( - args.channel_id, - args.timestamp, - args.reaction, - ); - return { - content: [{ type: "text", text: JSON.stringify(response) }], - }; - } - - case "slack_get_channel_history": { - const args = request.params.arguments as unknown as GetChannelHistoryArgs; - if (!args.channel_id) { - throw new Error("Missing required argument: channel_id"); - } - const response = await slackClient.getChannelHistory( - args.channel_id, - args.limit, - ); - return { - content: [{ type: "text", text: JSON.stringify(response) }], - }; - } - - case "slack_get_thread_replies": { - const args = request.params.arguments as unknown as GetThreadRepliesArgs; - if (!args.channel_id || !args.thread_ts) { - throw new Error( - "Missing required arguments: channel_id and thread_ts", - ); - } - const response = await slackClient.getThreadReplies( - args.channel_id, - args.thread_ts, - ); - return { - content: [{ type: "text", text: JSON.stringify(response) }], - }; - } - - case "slack_get_users": { - const args = request.params.arguments as unknown as GetUsersArgs; - const response = await slackClient.getUsers( - args.limit, - args.cursor, - ); - return { - content: [{ type: "text", text: JSON.stringify(response) }], - }; - } - - case "slack_get_user_profile": { - const args = request.params.arguments as unknown as GetUserProfileArgs; - if (!args.user_id) { - throw new Error("Missing required argument: user_id"); - } - const response = await slackClient.getUserProfile(args.user_id); - return { - content: [{ type: "text", text: JSON.stringify(response) }], - }; - } - - default: - throw new Error(`Unknown tool: ${request.params.name}`); - } - } catch (error) { - console.error("Error executing tool:", error); - - if (isSlackError(error)) { - throw new Error(formatSlackError(error)); - } - - if (error instanceof z.ZodError) { - throw new Error(`Invalid input: ${JSON.stringify(error.errors)}`); - } - - throw error; - } - } -); - -const app = express(); - -const transports = new Map(); - -// Create AsyncLocalStorage for request context -const asyncLocalStorage = new AsyncLocalStorage<{ - slack_token: string; -}>(); - -function getSlackToken() { - // First check if env var exists - if (process.env.SLACK_AUTH_TOKEN) { - return process.env.SLACK_AUTH_TOKEN; - } - // Fall back to token from request context - return asyncLocalStorage.getStore()!.slack_token; -} - -app.get("/sse", async (req, res) => { - const transport = new SSEServerTransport(`/messages`, res); - - // Set up cleanup when connection closes - res.on('close', async () => { - console.log(`SSE connection closed for transport: ${transport.sessionId}`); - try { - transports.delete(transport.sessionId); - } finally { - } - }); - - transports.set(transport.sessionId, transport); - - await server.connect(transport); - - console.log(`SSE connection established with transport: ${transport.sessionId}`); -}); - -app.post("/messages", async (req, res) => { - const sessionId = req.query.sessionId as string; - - let transport: SSEServerTransport | undefined; - transport = sessionId ? transports.get(sessionId) : undefined; - if (transport) { - const slack_token = req.headers['x-auth-token'] as string; - - asyncLocalStorage.run({ slack_token }, async () => { - await transport.handlePostMessage(req, res); - }); - } else { - console.error(`Transport not found for session ID: ${sessionId}`); - res.status(404).send({ error: "Transport not found" }); - } -}); - -app.listen(5000, () => { - console.log('server running on port 5000'); -}); \ No newline at end of file diff --git a/mcp_servers/slack/requirements.txt b/mcp_servers/slack/requirements.txt new file mode 100644 index 00000000..08250516 --- /dev/null +++ b/mcp_servers/slack/requirements.txt @@ -0,0 +1,6 @@ +mcp==1.11.0 +httpx +click +starlette +uvicorn[standard] +python-dotenv diff --git a/mcp_servers/slack/server.py b/mcp_servers/slack/server.py new file mode 100644 index 00000000..3dd46d96 --- /dev/null +++ b/mcp_servers/slack/server.py @@ -0,0 +1,995 @@ +import contextlib +import base64 +import logging +import os +import json +from collections.abc import AsyncIterator +from typing import Any, Dict + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +# Import bot tools +from bot_tools import ( + bot_token_context +) +from bot_tools.bot_messages import ( + bot_post_message, + bot_reply_to_thread, + bot_add_reaction +) + +# Import user tools +from user_tools import ( + user_token_context, + list_channels as user_list_channels, + get_channel_history as user_get_channel_history, + invite_users_to_channel, + get_thread_replies +) +from user_tools.user_messages import ( + user_post_message, + user_reply_to_thread, + user_add_reaction +) +from user_tools.search import user_search_messages +from user_tools.users import list_users, user_get_info + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +SLACK_MCP_SERVER_PORT = int(os.getenv("SLACK_MCP_SERVER_PORT", "5000")) + +def extract_access_tokens(request_or_scope) -> tuple[str, str]: + """Extract both bot and user access tokens from x-auth-data header. + Returns (bot_token, user_token) + """ + auth_data = None + + ## ---- for Klavis Cloud ---- ## + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + ## ---- for local development ---- ## + if not auth_data: + # Fall back to environment variables + bot_token = os.getenv("SLACK_BOT_TOKEN", "") + user_token = os.getenv("SLACK_USER_TOKEN", "") + return bot_token, user_token + + try: + # Parse the JSON auth data to extract both tokens + auth_json = json.loads(auth_data) + bot_token = auth_json.get('access_token', '') # Bot token at root level + user_token = auth_json.get('authed_user', {}).get('access_token', '') # User token in authed_user + return bot_token, user_token + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "", "" + +@click.command() +@click.option("--port", default=SLACK_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("slack-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + # ============= USER TOOLS (using user token) ============= + + # User Channels + types.Tool( + name="slack_user_list_channels", + description="List all channels the authenticated user has access to. This includes public channels, private channels the user is a member of, direct messages, and multi-party direct messages.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "number", + "description": "Maximum number of channels to return (default 100, max 200)", + "default": 100, + }, + "cursor": { + "type": "string", + "description": "Pagination cursor for next page of results", + }, + "types": { + "type": "string", + "description": "Mix and match channel types by providing a comma-separated list of any combination of public_channel, private_channel, mpim, im", + "default": "public_channel", + }, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_CHANNEL", "readOnlyHint": True} + ), + ), + types.Tool( + name="slack_get_channel_history", + description="Get recent messages from a channel", + inputSchema={ + "type": "object", + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the channel", + }, + "limit": { + "type": "number", + "description": "Number of messages to retrieve (default 10)", + "default": 10, + }, + }, + "required": ["channel_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_CHANNEL", "readOnlyHint": True} + ), + ), + types.Tool( + name="slack_get_thread_replies", + description="Get all replies in a message thread. This retrieves all messages in a thread, including the parent message. You can extract the channel_id and thread_ts from Slack URLs (e.g., https://workspace.slack.com/archives/C123456/p1234567890123456 becomes channel_id='C123456', thread_ts='1234567890.123456').", + inputSchema={ + "type": "object", + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the channel containing the thread (e.g., 'C1234567890')", + }, + "thread_ts": { + "type": "string", + "description": "The timestamp of the parent message that started the thread (e.g., '1234567890.123456'). This can be extracted from Slack message URLs by converting p1234567890123456 to 1234567890.123456", + }, + "limit": { + "type": "number", + "description": "Maximum number of messages to return (default 10, max 1000)", + "default": 10, + }, + "cursor": { + "type": "string", + "description": "Pagination cursor for next page of results", + }, + "oldest": { + "type": "string", + "description": "Only messages after this Unix timestamp (inclusive)", + }, + "latest": { + "type": "string", + "description": "Only messages before this Unix timestamp (exclusive)", + }, + "inclusive": { + "type": "boolean", + "description": "Include messages with oldest or latest timestamps in results", + }, + }, + "required": ["channel_id", "thread_ts"], + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_THREAD", "readOnlyHint": True} + ), + ), + types.Tool( + name="slack_invite_users_to_channel", + description="Invite one or more users (including bot users) to a Slack channel. Both regular users and bot users can be invited using their respective user IDs.", + inputSchema={ + "type": "object", + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the channel to invite users to (e.g., 'C1234567890')", + }, + "user_ids": { + "type": "array", + "items": { + "type": "string", + }, + "description": "A list of user IDs to invite to the channel. Can include both regular user IDs and bot user IDs", + }, + }, + "required": ["channel_id", "user_ids"], + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_CHANNEL"} + ), + ), + + # User Info + types.Tool( + name="slack_list_users", + description="Lists all users in a Slack team using user token", + inputSchema={ + "type": "object", + "properties": { + "cursor": { + "type": "string", + "description": "Pagination cursor for getting more results", + }, + "limit": { + "type": "integer", + "description": "Maximum number of users to return (default 100, max 200)", + }, + "team_id": { + "type": "string", + "description": "Team ID to list users from (for Enterprise Grid)", + }, + "include_locale": { + "type": "boolean", + "description": "Whether to include locale information for each user", + }, + }, + "required": [], + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_USER", "readOnlyHint": True} + ), + ), + types.Tool( + name="slack_user_get_info", + description="Gets information about a user", + inputSchema={ + "type": "object", + "properties": { + "user_id": { + "type": "string", + "description": "The ID of the user to get information for (e.g., 'U1234567890')", + }, + "include_locale": { + "type": "boolean", + "description": "Whether to include locale information for the user", + }, + }, + "required": ["user_id"], + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_USER", "readOnlyHint": True} + ), + ), + + # User Search + types.Tool( + name="slack_user_search_messages", + description="Searches for messages matching a query.", + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "The search query string. You can use Slack's search operators like 'in:#channel', 'from:@user', 'before:YYYY-MM-DD', 'after:YYYY-MM-DD', etc.", + }, + "channel_ids": { + "type": "array", + "items": { + "type": "string", + }, + "description": "Optional list of channel IDs to search within. If not provided, searches across all accessible channels.", + }, + "sort": { + "type": "string", + "enum": ["score", "timestamp"], + "description": "Sort results by relevance (score) or date (timestamp). Default is score.", + "default": "score", + }, + "sort_dir": { + "type": "string", + "enum": ["asc", "desc"], + "description": "Sort direction. Default is desc (newest/most relevant first).", + "default": "desc", + }, + "count": { + "type": "number", + "description": "Number of results to return per page (default 20, max 100)", + "default": 20, + }, + "cursor": { + "type": "string", + "description": "Pagination cursor for next page of results", + }, + "highlight": { + "type": "boolean", + "description": "Whether to include highlighting of matched terms", + "default": True, + }, + }, + "required": ["query"], + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_MESSAGE", "readOnlyHint": True} + ), + ), + + # User Messages + types.Tool( + name="slack_user_post_message", + description="Post a new message to a Slack channel as a user.", + inputSchema={ + "type": "object", + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the Slack channel (e.g., 'C1234567890')", + }, + "text": { + "type": "string", + "description": "The message text to post", + }, + }, + "required": ["channel_id", "text"], + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_MESSAGE"} + ), + ), + types.Tool( + name="slack_user_reply_to_thread", + description="Reply to a specific message thread in Slack as a user.", + inputSchema={ + "type": "object", + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the Slack channel (e.g., 'C1234567890')", + }, + "thread_ts": { + "type": "string", + "description": "The timestamp of the parent message to reply to", + }, + "text": { + "type": "string", + "description": "The message text to post as a reply", + }, + }, + "required": ["channel_id", "thread_ts", "text"], + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_MESSAGE"} + ), + ), + types.Tool( + name="slack_user_add_reaction", + description="Add a reaction emoji to a message as a user.", + inputSchema={ + "type": "object", + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the Slack channel (e.g., 'C1234567890')", + }, + "timestamp": { + "type": "string", + "description": "The timestamp of the message to react to", + }, + "reaction": { + "type": "string", + "description": "The name of the emoji reaction (without colons, e.g., 'thumbsup', 'heart')", + }, + }, + "required": ["channel_id", "timestamp", "reaction"], + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_REACTION"} + ), + ), + + # ============= BOT TOOLS (using bot token) ============= + + # Bot Messages + types.Tool( + name="slack_bot_post_message", + description="Post a new message to a Slack channel as a bot.", + inputSchema={ + "type": "object", + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the channel to post to", + }, + "text": { + "type": "string", + "description": "The message text to post", + }, + }, + "required": ["channel_id", "text"], + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_MESSAGE"} + ), + ), + types.Tool( + name="slack_bot_reply_to_thread", + description="Reply to a specific message thread in Slack as a bot.", + inputSchema={ + "type": "object", + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the channel containing the thread", + }, + "thread_ts": { + "type": "string", + "description": "The timestamp of the parent message in the format '1234567890.123456'. Timestamps in the format without the period can be converted by adding the period such that 6 numbers come after it.", + }, + "text": { + "type": "string", + "description": "The reply text", + }, + }, + "required": ["channel_id", "thread_ts", "text"], + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_MESSAGE"} + ), + ), + types.Tool( + name="slack_bot_add_reaction", + description="Add a reaction emoji to a message as a bot.", + inputSchema={ + "type": "object", + "properties": { + "channel_id": { + "type": "string", + "description": "The ID of the channel containing the message", + }, + "timestamp": { + "type": "string", + "description": "The timestamp of the message to react to", + }, + "reaction": { + "type": "string", + "description": "The name of the emoji reaction (without ::)", + }, + }, + "required": ["channel_id", "timestamp", "reaction"], + }, + annotations=types.ToolAnnotations( + **{"category": "SLACK_REACTION"} + ), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + + # ============= USER TOOLS (using user token) ============= + + # User Channels + if name == "slack_user_list_channels": + limit = arguments.get("limit") + cursor = arguments.get("cursor") + types_param = arguments.get("types") + + try: + result = await user_list_channels(limit, cursor, types_param) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "slack_get_channel_history": + channel_id = arguments.get("channel_id") + if not channel_id: + return [ + types.TextContent( + type="text", + text="Error: channel_id parameter is required", + ) + ] + + limit = arguments.get("limit") + + try: + result = await user_get_channel_history(channel_id, limit) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "slack_get_thread_replies": + channel_id = arguments.get("channel_id") + thread_ts = arguments.get("thread_ts") + + if not channel_id: + return [ + types.TextContent( + type="text", + text="Error: channel_id parameter is required", + ) + ] + + if not thread_ts: + return [ + types.TextContent( + type="text", + text="Error: thread_ts parameter is required", + ) + ] + + limit = arguments.get("limit") + cursor = arguments.get("cursor") + oldest = arguments.get("oldest") + latest = arguments.get("latest") + inclusive = arguments.get("inclusive") + + try: + result = await get_thread_replies( + channel_id, + thread_ts, + limit, + cursor, + oldest, + latest, + inclusive + ) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "slack_invite_users_to_channel": + channel_id = arguments.get("channel_id") + user_ids = arguments.get("user_ids") + + if not channel_id: + return [ + types.TextContent( + type="text", + text="Error: channel_id parameter is required", + ) + ] + + if not user_ids or not isinstance(user_ids, list) or len(user_ids) == 0: + return [ + types.TextContent( + type="text", + text="Error: user_ids parameter is required and must be a non-empty list", + ) + ] + + try: + result = await invite_users_to_channel(channel_id, user_ids) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # User Info + elif name == "slack_list_users": + cursor = arguments.get("cursor") + limit = arguments.get("limit") + team_id = arguments.get("team_id") + include_locale = arguments.get("include_locale") + + try: + result = await list_users(cursor, limit, team_id, include_locale) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "slack_user_get_info": + user_id = arguments.get("user_id") + if not user_id: + return [ + types.TextContent( + type="text", + text="Error: user_id parameter is required", + ) + ] + + include_locale = arguments.get("include_locale") + + try: + result = await user_get_info(user_id, include_locale) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # User Search + elif name == "slack_user_search_messages": + query = arguments.get("query") + if not query: + return [ + types.TextContent( + type="text", + text="Error: query parameter is required", + ) + ] + + channel_ids = arguments.get("channel_ids") + sort = arguments.get("sort") + sort_dir = arguments.get("sort_dir") + count = arguments.get("count") + cursor = arguments.get("cursor") + highlight = arguments.get("highlight") + + try: + result = await user_search_messages(query, channel_ids, sort, sort_dir, count, cursor, highlight) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # User Messages + elif name == "slack_user_post_message": + channel_id = arguments.get("channel_id") + text = arguments.get("text") + if not channel_id or not text: + return [ + types.TextContent( + type="text", + text="Error: channel_id and text parameters are required", + ) + ] + + try: + result = await user_post_message(channel_id, text) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "slack_user_reply_to_thread": + channel_id = arguments.get("channel_id") + thread_ts = arguments.get("thread_ts") + text = arguments.get("text") + if not channel_id or not thread_ts or not text: + return [ + types.TextContent( + type="text", + text="Error: channel_id, thread_ts, and text parameters are required", + ) + ] + + try: + result = await user_reply_to_thread(channel_id, thread_ts, text) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "slack_user_add_reaction": + channel_id = arguments.get("channel_id") + timestamp = arguments.get("timestamp") + reaction = arguments.get("reaction") + if not channel_id or not timestamp or not reaction: + return [ + types.TextContent( + type="text", + text="Error: channel_id, timestamp, and reaction parameters are required", + ) + ] + + try: + result = await user_add_reaction(channel_id, timestamp, reaction) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # ============= BOT TOOLS (using bot token) ============= + + # Bot Messages + elif name == "slack_bot_post_message": + channel_id = arguments.get("channel_id") + text = arguments.get("text") + if not channel_id or not text: + return [ + types.TextContent( + type="text", + text="Error: channel_id and text parameters are required", + ) + ] + + try: + result = await bot_post_message(channel_id, text) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "slack_bot_reply_to_thread": + channel_id = arguments.get("channel_id") + thread_ts = arguments.get("thread_ts") + text = arguments.get("text") + if not channel_id or not thread_ts or not text: + return [ + types.TextContent( + type="text", + text="Error: channel_id, thread_ts, and text parameters are required", + ) + ] + + try: + result = await bot_reply_to_thread(channel_id, thread_ts, text) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "slack_bot_add_reaction": + channel_id = arguments.get("channel_id") + timestamp = arguments.get("timestamp") + reaction = arguments.get("reaction") + if not channel_id or not timestamp or not reaction: + return [ + types.TextContent( + type="text", + text="Error: channel_id, timestamp, and reaction parameters are required", + ) + ] + + try: + result = await bot_add_reaction(channel_id, timestamp, reaction) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + else: + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract both bot and user tokens from headers + bot_token, user_token = extract_access_tokens(request) + + # Set both tokens in context for this request + bot_token_ctx = bot_token_context.set(bot_token) + user_token_ctx = user_token_context.set(user_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + bot_token_context.reset(bot_token_ctx) + user_token_context.reset(user_token_ctx) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract both bot and user tokens from headers + bot_token, user_token = extract_access_tokens(scope) + + # Set both tokens in context for this request + bot_token_ctx = bot_token_context.set(bot_token) + user_token_ctx = user_token_context.set(user_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + bot_token_context.reset(bot_token_ctx) + user_token_context.reset(user_token_ctx) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + +if __name__ == "__main__": + main() diff --git a/mcp_servers/slack/tsconfig.json b/mcp_servers/slack/tsconfig.json deleted file mode 100644 index 89edf25a..00000000 --- a/mcp_servers/slack/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "." - }, - "include": ["./**/*.ts"] -} diff --git a/mcp_servers/slack/user_tools/__init__.py b/mcp_servers/slack/user_tools/__init__.py new file mode 100644 index 00000000..04e845f9 --- /dev/null +++ b/mcp_servers/slack/user_tools/__init__.py @@ -0,0 +1,31 @@ +from .search import user_search_messages +from .user_messages import user_post_message, user_reply_to_thread, user_add_reaction +from .channels import list_channels, get_channel_history, invite_users_to_channel +from .users import list_users, user_get_info +from .threads import get_thread_replies +from .base import user_token_context + +__all__ = [ + # User Search + "user_search_messages", + + # User Messages + "user_post_message", + "user_reply_to_thread", + "user_add_reaction", + + # Channels + "list_channels", + "get_channel_history", + "invite_users_to_channel", + + # Threads + "get_thread_replies", + + # Users + "list_users", + "user_get_info", + + # Base + "user_token_context", +] \ No newline at end of file diff --git a/mcp_servers/slack/user_tools/base.py b/mcp_servers/slack/user_tools/base.py new file mode 100644 index 00000000..93408b15 --- /dev/null +++ b/mcp_servers/slack/user_tools/base.py @@ -0,0 +1,93 @@ +import logging +from typing import Any, Dict, Optional +from contextvars import ContextVar +import httpx + +# Configure logging +logger = logging.getLogger(__name__) + +SLACK_API_ENDPOINT = "https://slack.com/api" + +# Context variable to store user token for each request +user_token_context: ContextVar[str] = ContextVar('user_token') + +def get_user_token() -> str: + """Get the user authentication token from context.""" + try: + return user_token_context.get() + except LookupError: + raise RuntimeError("User authentication token not found in request context") + +class SlackUserClient: + """Client for Slack API using User Bearer Authentication.""" + + @staticmethod + async def make_request( + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + """Make an HTTP request to Slack API using user token.""" + api_token = get_user_token() + + if not api_token: + raise RuntimeError("No user API token provided. Please set the authentication header.") + + # Slack uses Bearer Authentication + headers = { + "Authorization": f"Bearer {api_token}", + "Content-Type": "application/json; charset=utf-8" + } + + url = f"{SLACK_API_ENDPOINT}/{endpoint}" + + async with httpx.AsyncClient() as client: + if method.upper() == "GET": + response = await client.get(url, headers=headers, params=params) + elif method.upper() == "POST": + response = await client.post(url, headers=headers, json=data) + elif method.upper() == "PUT": + response = await client.put(url, headers=headers, json=data) + elif method.upper() == "DELETE": + response = await client.delete(url, headers=headers) + else: + raise ValueError(f"Unsupported HTTP method: {method}") + + # Check HTTP status + response.raise_for_status() + + # Handle empty responses + if response.status_code == 204 or not response.content: + return {"ok": True} + + try: + json_response = response.json() + + # Check for Slack API errors + if not json_response.get("ok", False): + error_msg = json_response.get("error", "Unknown Slack API error") + logger.error(f"Slack API error: {error_msg}") + raise SlackAPIError(error_msg, json_response) + + return json_response + except ValueError as e: + # Handle cases where response content exists but isn't valid JSON + logger.error(f"Failed to parse JSON response: {e}") + logger.error(f"Response content: {response.content}") + return {"error": "Invalid JSON response", "content": response.text} + +async def make_slack_user_request( + method: str, + endpoint: str, + data: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """Make an HTTP request to Slack API using user token.""" + return await SlackUserClient.make_request(method, endpoint, data, params) + +class SlackAPIError(Exception): + """Custom exception for Slack API errors.""" + def __init__(self, message: str, response: Optional[Dict[str, Any]] = None): + super().__init__(message) + self.response = response \ No newline at end of file diff --git a/mcp_servers/slack/user_tools/channels.py b/mcp_servers/slack/user_tools/channels.py new file mode 100644 index 00000000..9471dc4a --- /dev/null +++ b/mcp_servers/slack/user_tools/channels.py @@ -0,0 +1,114 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_slack_user_request + +# Configure logging +logger = logging.getLogger(__name__) + +# list_channels returns all channels that the user has access to +# User tokens: channels:read, groups:read, im:read, mpim:read +async def list_channels( + limit: Optional[int] = None, + cursor: Optional[str] = None, + types: Optional[str] = None +) -> Dict[str, Any]: + """List all channels the authenticated user has access to. + + This uses the user token to list channels, which means it can access: + - Public channels in the workspace + - Private channels the user is a member of + - Direct messages (DMs) + - Multi-party direct messages (group DMs) + + Args: + limit: Maximum number of channels to return (default 100, max 200) + cursor: Pagination cursor for next page of results + types: Channel types to include (public_channel, private_channel, mpim, im) + + Returns: + Dictionary containing the list of channels and pagination metadata + """ + logger.info("Executing tool: slack_user_list_channels") + + params = { + "exclude_archived": "true", + } + + if limit: + params["limit"] = str(min(limit, 200)) + else: + params["limit"] = "100" + + if cursor: + params["cursor"] = cursor + + if types: + params["types"] = types + else: + params["types"] = "public_channel" + + try: + return await make_slack_user_request("GET", "conversations.list", params=params) + except Exception as e: + logger.exception(f"Error executing tool slack_user_list_channels: {e}") + raise e + +# get_channel_history returns the most recent messages from a channel +# User tokens: channels:history, groups:history, im:history, mpim:history +async def get_channel_history( + channel_id: str, + limit: Optional[int] = None +) -> Dict[str, Any]: + """Get recent messages from a channel.""" + logger.info(f"Executing tool: slack_get_channel_history for channel {channel_id}") + + params = { + "channel": channel_id, + } + + if limit: + params["limit"] = str(limit) + else: + params["limit"] = "10" + + try: + return await make_slack_user_request("GET", "conversations.history", params=params) + except Exception as e: + logger.exception(f"Error executing tool slack_get_channel_history: {e}") + raise e + +# invite_users_to_channel invites users to a channel +# User tokens: channels:write.invites, groups:write.invites, im:write.invites, mpim:write.invites +async def invite_users_to_channel( + channel_id: str, + user_ids: list[str] +) -> Dict[str, Any]: + """Invite one or more users (including bot users) to a channel. + + This uses the user token to invite users to a channel. The authenticated user must have + permission to invite users to the specified channel. Both regular users and bot users + can be invited using their respective user IDs. + + Args: + channel_id: The ID of the channel to invite users to (e.g., 'C1234567890') + user_ids: A list of user IDs to invite (e.g., ['U1234567890', 'U9876543210']) + + Returns: + Dictionary containing the updated channel information + """ + logger.info(f"Executing tool: slack_invite_users_to_channel for channel {channel_id}") + + if not user_ids: + raise ValueError("At least one user ID must be provided") + + # Slack API expects comma-separated user IDs + data = { + "channel": channel_id, + "users": ",".join(user_ids) + } + + try: + return await make_slack_user_request("POST", "conversations.invite", data=data) + except Exception as e: + logger.exception(f"Error executing tool slack_invite_users_to_channel: {e}") + raise e diff --git a/mcp_servers/slack/user_tools/search.py b/mcp_servers/slack/user_tools/search.py new file mode 100644 index 00000000..8fcac3b4 --- /dev/null +++ b/mcp_servers/slack/user_tools/search.py @@ -0,0 +1,60 @@ +import logging +from typing import Any, Dict, Optional, List +from .base import make_slack_user_request + +# Configure logging +logger = logging.getLogger(__name__) + +# user_search_messages searches for messages in the workspace using user token (includes private channels and DMs) +# User tokens: search:read +async def user_search_messages( + query: str, + channel_ids: Optional[List[str]] = None, + sort: Optional[str] = None, + sort_dir: Optional[str] = None, + count: Optional[int] = None, + cursor: Optional[str] = None, + highlight: Optional[bool] = None +) -> Dict[str, Any]: + """Search for messages in the workspace using user token (includes private channels and DMs).""" + logger.info(f"Executing tool: user_search_messages with query: {query}") + + # Construct the query with channel filters if provided + search_query = query + if channel_ids and len(channel_ids) > 0: + # Add channel filters to the query + channels_filter = " ".join([f"in:{channel_id}" for channel_id in channel_ids]) + search_query = f"{query} {channels_filter}" + + params = { + "query": search_query, + } + + if count: + params["count"] = str(min(count, 100)) + else: + params["count"] = "20" + + if highlight is not None: + params["highlight"] = "1" if highlight else "0" + else: + params["highlight"] = "1" + + if sort: + params["sort"] = sort + else: + params["sort"] = "score" + + if sort_dir: + params["sort_dir"] = sort_dir + else: + params["sort_dir"] = "desc" + + if cursor: + params["cursor"] = cursor + + try: + return await make_slack_user_request("GET", "search.messages", params=params) + except Exception as e: + logger.exception(f"Error executing tool user_search_messages: {e}") + raise e diff --git a/mcp_servers/slack/user_tools/threads.py b/mcp_servers/slack/user_tools/threads.py new file mode 100644 index 00000000..2fe6060b --- /dev/null +++ b/mcp_servers/slack/user_tools/threads.py @@ -0,0 +1,79 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_slack_user_request + +logger = logging.getLogger(__name__) + +# get_thread_replies returns all replies in a message thread +# User tokens: channels:history, groups:history, im:history, mpim:history +async def get_thread_replies( + channel_id: str, + thread_ts: str, + limit: Optional[int] = None, + cursor: Optional[str] = None, + oldest: Optional[str] = None, + latest: Optional[str] = None, + inclusive: Optional[bool] = None +) -> Dict[str, Any]: + """Get all replies in a message thread. + + This retrieves all messages in a thread, including the parent message. + Works with public channels, private channels, DMs, and group DMs that + the authenticated user has access to. + + Args: + channel_id: The ID of the channel containing the thread (e.g., 'C1234567890') + thread_ts: The timestamp of the parent message that started the thread (e.g., '1234567890.123456') + limit: Maximum number of messages to return (default 10, max 1000) + cursor: Pagination cursor for next page of results + oldest: Only messages after this Unix timestamp (inclusive) + latest: Only messages before this Unix timestamp (exclusive) + inclusive: Include messages with oldest or latest timestamps in results + + Returns: + Dictionary containing: + - messages: List of messages in the thread (includes parent as first message) + - has_more: Boolean indicating if there are more messages + - response_metadata: Contains cursor for pagination if has_more is True + + Examples: + # Get a thread from a Slack URL like: + # https://workspace.slack.com/archives/C123456/p1234567890123456 + # Parse to: channel_id='C123456', thread_ts='1234567890.123456' + + result = await get_thread_replies( + channel_id='C123456', + thread_ts='1234567890.123456', + limit=50 + ) + """ + logger.info(f"Executing tool: get_thread_replies for channel {channel_id}, thread {thread_ts}") + + params = { + "channel": channel_id, + "ts": thread_ts, + } + + if limit: + params["limit"] = str(min(limit, 1000)) + else: + params["limit"] = "10" + + if cursor: + params["cursor"] = cursor + + if oldest: + params["oldest"] = oldest + + if latest: + params["latest"] = latest + + if inclusive is not None: + params["inclusive"] = "true" if inclusive else "false" + + try: + return await make_slack_user_request("GET", "conversations.replies", params=params) + except Exception as e: + logger.exception(f"Error executing tool get_thread_replies: {e}") + raise e + diff --git a/mcp_servers/slack/user_tools/user_messages.py b/mcp_servers/slack/user_tools/user_messages.py new file mode 100644 index 00000000..2158d02b --- /dev/null +++ b/mcp_servers/slack/user_tools/user_messages.py @@ -0,0 +1,70 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_slack_user_request + +# Configure logging +logger = logging.getLogger(__name__) + +# Sends a message to a channel as a user +# User tokens: chat:write, chat:write:user, chat:write:bot +async def user_post_message( + channel_id: str, + text: str +) -> Dict[str, Any]: + """Post a new message to a Slack channel using user token.""" + logger.info(f"Executing tool: user_post_message to channel {channel_id}") + + data = { + "channel": channel_id, + "text": text + } + + try: + return await make_slack_user_request("POST", "chat.postMessage", data=data) + except Exception as e: + logger.exception(f"Error executing tool user_post_message: {e}") + raise e + +# Replies to a thread in a channel as a user +# User tokens: chat:write, chat:write:user, chat:write:bot +async def user_reply_to_thread( + channel_id: str, + thread_ts: str, + text: str +) -> Dict[str, Any]: + """Reply to a specific message thread in Slack using user token.""" + logger.info(f"Executing tool: user_reply_to_thread in channel {channel_id}, thread {thread_ts}") + + data = { + "channel": channel_id, + "thread_ts": thread_ts, + "text": text + } + + try: + return await make_slack_user_request("POST", "chat.postMessage", data=data) + except Exception as e: + logger.exception(f"Error executing tool user_reply_to_thread: {e}") + raise e + +# Adds a reaction to a message as a user +# User tokens: reactions:write +async def user_add_reaction( + channel_id: str, + timestamp: str, + reaction: str +) -> Dict[str, Any]: + """Add a reaction emoji to a message using user token.""" + logger.info(f"Executing tool: user_add_reaction to message {timestamp} in channel {channel_id}") + + data = { + "channel": channel_id, + "timestamp": timestamp, + "name": reaction + } + + try: + return await make_slack_user_request("POST", "reactions.add", data=data) + except Exception as e: + logger.exception(f"Error executing tool user_add_reaction: {e}") + raise e diff --git a/mcp_servers/slack/user_tools/users.py b/mcp_servers/slack/user_tools/users.py new file mode 100644 index 00000000..dc6ce410 --- /dev/null +++ b/mcp_servers/slack/user_tools/users.py @@ -0,0 +1,66 @@ +import logging +from typing import Any, Dict, Optional +from .base import make_slack_user_request + +# Configure logging +logger = logging.getLogger(__name__) + +# Lists all users in a Slack team. +# User tokens: users:read +async def list_users( + cursor: Optional[str] = None, + limit: Optional[int] = None, + team_id: Optional[str] = None, + include_locale: Optional[bool] = None +) -> Dict[str, Any]: + """Lists all users in a Slack team using users.list API.""" + logger.info("Executing tool: list_users") + + params = {} + + # Set limit (max 200 per page according to Slack API) + if limit: + params["limit"] = str(min(limit, 200)) + else: + params["limit"] = "100" + + # Add cursor for pagination + if cursor: + params["cursor"] = cursor + + # Add team_id if provided (for Enterprise Grid) + if team_id: + params["team_id"] = team_id + + # Include locale information + if include_locale is not None: + params["include_locale"] = str(include_locale).lower() + + try: + return await make_slack_user_request("GET", "users.list", params=params) + except Exception as e: + logger.exception(f"Error executing tool list_users: {e}") + raise e + +# Gets information about a user. +# User tokens: users:read +async def user_get_info( + user_id: str, + include_locale: Optional[bool] = None +) -> Dict[str, Any]: + """Gets information about a user using users.info API.""" + logger.info(f"Executing tool: user_get_info for user {user_id}") + + params = { + "user": user_id + } + + # Include locale information + if include_locale is not None: + params["include_locale"] = str(include_locale).lower() + + try: + return await make_slack_user_request("GET", "users.info", params=params) + except Exception as e: + logger.exception(f"Error executing tool user_get_info: {e}") + raise e diff --git a/mcp_servers/spotify/.env.example b/mcp_servers/spotify/.env.example new file mode 100644 index 00000000..d88f5fa1 --- /dev/null +++ b/mcp_servers/spotify/.env.example @@ -0,0 +1,4 @@ +# Port for the MCP server to listen on +SPOTIFY_MCP_SERVER_PORT=5000 +SPOTIFY_CLIENT_ID= +SPOTIFY_CLIENT_SECRET= \ No newline at end of file diff --git a/mcp_servers/spotify/Dockerfile b/mcp_servers/spotify/Dockerfile new file mode 100644 index 00000000..090503e4 --- /dev/null +++ b/mcp_servers/spotify/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +COPY mcp_servers/spotify/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY mcp_servers/spotify/server.py . +COPY mcp_servers/spotify/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/spotify/README.md b/mcp_servers/spotify/README.md new file mode 100644 index 00000000..c2b32965 --- /dev/null +++ b/mcp_servers/spotify/README.md @@ -0,0 +1,77 @@ +# Spotify MCP Server + +A Model Context Protocol (MCP) server for Spotify integration. Control playback, manage playlists, and access music data using Spotify's Web API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Spotify with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("SPOTIFY", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/spotify-mcp-server:latest + + +# Run Spotify MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/spotify-mcp-server:latest + +# Run Spotify MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_spotify_access_token_here"}' \ + ghcr.io/klavis-ai/spotify-mcp-server:latest +``` + +**OAuth Setup:** Spotify requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Playback Control**: Control Spotify playback, play, pause, skip tracks +- **Playlist Management**: Create, update, and manage Spotify playlists +- **Search**: Search for tracks, albums, artists, and playlists +- **User Library**: Access and manage user's saved music +- **Recommendations**: Get personalized music recommendations + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/spotify/requirements.txt b/mcp_servers/spotify/requirements.txt new file mode 100644 index 00000000..36068a1f --- /dev/null +++ b/mcp_servers/spotify/requirements.txt @@ -0,0 +1,9 @@ +mcp==1.11.0 +pydantic +typing-extensions +aiohttp +click +python-dotenv +starlette +uvicorn[standard] +spotipy \ No newline at end of file diff --git a/mcp_servers/spotify/server.py b/mcp_servers/spotify/server.py new file mode 100644 index 00000000..a88676f7 --- /dev/null +++ b/mcp_servers/spotify/server.py @@ -0,0 +1,2055 @@ +import contextlib +import json +import logging +import os +from collections.abc import AsyncIterator +from typing import Any, Dict +from contextvars import ContextVar + +import click +import mcp.types as types +from dotenv import load_dotenv +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +from tools import ( + get_spotify_access_token, + get_spotify_client, + search_tracks, + auth_token_context, + get_tracks_info, + get_user_spotify_client, + get_user_saved_tracks, + check_user_saved_tracks, + save_tracks_for_current_user, + remove_user_saved_tracks, + get_albums_info, + get_album_tracks, + get_user_saved_albums, + save_albums_for_current_user, + remove_albums_for_current_user, + check_user_saved_albums, + get_artists_info, + get_artist_albums, + get_artist_top_tracks, + get_episodes_info, + save_episodes_for_current_user, + get_user_saved_episodes, + remove_episodes_for_current_user, + check_user_saved_episodes, + get_playlist_by_id, + get_user_owned_playlists, + update_playlist_details, + get_current_user_profile, + get_current_user_top_items, + get_spotify_user_public_profile, + follow_playlist, + unfollow_playlist, + get_current_user_followed_artists, + follow_artists_or_users, + unfollow_artists_or_users, + check_user_follows, + add_items_to_playlist, + remove_items_from_playlist, + get_current_user_playlists, + get_multiple_shows, + get_show_episodes, + get_current_user_saved_shows, + save_shows_to_user_library, + remove_shows_from_user_library, + check_user_saved_shows, +) + +load_dotenv() + +# Configure logging +logger = logging.getLogger(__name__) + +SPOTIFY_MCP_SERVER_PORT = int(os.getenv("SPOTIFY_MCP_SERVER_PORT", "5000")) + + +@click.command() +@click.option( + "--port", + default=SPOTIFY_MCP_SERVER_PORT, + help="Port to listen on for HTTP" +) +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)" +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams" +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("spotify-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="spotify_search_tracks", + description="Search for tracks, albums, artists, playlists, shows or episodes on Spotify", + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Search query for (track, album, artist, playlist, show, episode)" + }, + "type": { + "type": "string", + "description": "Type of search: track, album, artist, playlist, show, episode, or audiobook", + "enum": ["track", "album", "artist", "playlist", "show", "episode", "audiobook"] + }, + "limit": { + "type": "integer", + "description": "Number of results to return (default: 10)", + "default": 10 + } + }, + "required": ["query", "type"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_TRACK", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_track_info", + description="Get detailed information about a specific track", + inputSchema={ + "type": "object", + "properties": { + "ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify track IDs to retrieve information for" + } + }, + "required": ["ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_TRACK", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_user_saved_tracks", + description="Get the user's saved tracks (liked/saved songs) from Spotify", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Max number of tracks to return (default: 20, max: 50)", + "default": 20 + }, + "offset": { + "type": "integer", + "description": "Index of the first track to return (for pagination, default: 0)", + "default": 0 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_TRACK", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_check_user_saved_tracks", + description="Check if a track or multiple tracks is saved in the user's library", + inputSchema={ + "type": "object", + "properties": { + "track_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify track IDs to check" + } + }, + "required": ["track_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_TRACK", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_save_tracks_for_current_user", + description="Save tracks to the user's library", + inputSchema={ + "type": "object", + "properties": { + "track_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify track IDs to save" + } + }, + "required": ["track_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_TRACK"}) + ), + types.Tool( + name="spotify_remove_user_saved_tracks", + description="Remove tracks from the user's library", + inputSchema={ + "type": "object", + "properties": { + "track_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify track IDs to remove" + } + }, + "required": ["track_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_TRACK"}) + ), + types.Tool( + name="spotify_get_albums_info", + description="Get detailed information about one or multiple albums", + inputSchema={ + "type": "object", + "properties": { + "album_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify album IDs to retrieve information for" + } + }, + "required": ["album_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_ALBUM", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_album_tracks", + description="Get detailed information about tracks in a specific album", + inputSchema={ + "type": "object", + "properties": { + "album_id": { + "type": "string", + "description": "Spotify album ID to retrieve tracks for" + } + }, + "required": ["album_id"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_ALBUM", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_user_saved_albums", + description="Get the user's saved albums from Spotify", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Max number of albums to return (default: 20, max: 50)", + "default": 20 + }, + "offset": { + "type": "integer", + "description": "Index of the first album to return (for pagination, default: 0)", + "default": 0 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_ALBUM", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_save_albums_for_current_user", + description="Save albums to the user's library", + inputSchema={ + "type": "object", + "properties": { + "album_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify album IDs to save" + } + }, + "required": ["album_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_ALBUM"}) + ), + types.Tool( + name="spotify_remove_albums_for_current_user", + description="Remove albums from the user's library", + inputSchema={ + "type": "object", + "properties": { + "album_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify album IDs to remove" + } + }, + "required": ["album_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_ALBUM"}) + ), + types.Tool( + name="spotify_check_user_saved_albums", + description="Check if an album or multiple albums is saved in the user's library", + inputSchema={ + "type": "object", + "properties": { + "album_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify album IDs to check" + } + }, + "required": ["album_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_ALBUM", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_artists_info", + description="Get detailed information about one or multiple artists", + inputSchema={ + "type": "object", + "properties": { + "artist_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify artist IDs to retrieve information for" + } + }, + "required": ["artist_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_ARTIST", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_artist_albums", + description="Get detailed information about albums by a specific artist", + inputSchema={ + "type": "object", + "properties": { + "artist_id": { + "type": "string", + "description": "Spotify artist ID to retrieve albums for" + } + }, + "required": ["artist_id"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_ARTIST", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_artist_top_tracks", + description="Get the top tracks of a specific artist by country", + inputSchema={ + "type": "object", + "properties": { + "artist_id": { + "type": "string", + "description": "Spotify artist ID to retrieve top tracks for" + }, + "country": { + "type": "string", + "description": "2-letter country code (e.g., 'US', 'GB', 'IN')" + } + }, + "required": ["artist_id"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_ARTIST", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_episodes_info", + description="Get detailed information about one or multiple podcast episodes", + inputSchema={ + "type": "object", + "properties": { + "episode_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify episode IDs to retrieve information for" + }, + "market": { + "type": "string", + "description": "Country market to restrict results (ISO 3166-1 alpha-2, default: None)", + "default": None + } + }, + "required": ["episode_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_EPISODE", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_save_episodes_for_current_user", + description="Save podcast episodes to the user's library", + inputSchema={ + "type": "object", + "properties": { + "episode_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify episode IDs to save" + } + }, + "required": ["episode_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_EPISODE"}) + ), + types.Tool( + name="spotify_get_user_saved_episodes", + description="Get the user's saved podcast episodes from Spotify", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Max number of episodes to return (default: 20, max: 50)", + "default": 20 + }, + "offset": { + "type": "integer", + "description": "Index of the first episode to return (for pagination, default: 0)", + "default": 0 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_EPISODE", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_remove_episodes_for_current_user", + description="Remove podcast episodes from the user's library", + inputSchema={ + "type": "object", + "properties": { + "episode_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify episode IDs to remove" + } + }, + "required": ["episode_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_EPISODE"}) + ), + types.Tool( + name="spotify_check_user_saved_episodes", + description="Check if a podcast episode or multiple episodes is saved in the user's library", + inputSchema={ + "type": "object", + "properties": { + "episode_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify episode IDs to check" + } + }, + "required": ["episode_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_EPISODE", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_playlist_by_id", + description="Get a Spotify playlist's full metadata and contents by its Spotify ID", + inputSchema={ + "type": "object", + "properties": { + "playlist_id": { + "type": "string", + "description": "Spotify playlist ID to retrieve" + }, + "market": { + "type": "string", + "description": "Country market to restrict results (ISO 3166-1 alpha-2, default: None)", + "default": None + } + }, + "required": ["playlist_id"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_PLAYLIST", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_user_owned_playlists", + description="Get playlists owned by a specific user", + inputSchema={ + "type": "object", + "properties": { + "user_id": { + "type": "string", + "description": "Spotify user ID to retrieve owned playlists for" + }, + "limit": { + "type": "integer", + "description": "Max number of playlists to return (default: 20, max: 50)", + "default": 20 + }, + "offset": { + "type": "integer", + "description": "Index of the first playlist to return (for pagination, default: 0)", + "default": 0 + } + }, + "required": ["user_id"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_PLAYLIST", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_update_playlist_details", + description="Change a playlist's name and/or public/private state", + inputSchema={ + "type": "object", + "properties": { + "playlist_id": { + "type": "string", + "description": "Spotify playlist ID to update" + }, + "name": { + "type": "string", + "description": "New name for the playlist (optional)" + }, + "public": { + "type": "boolean", + "description": "Set to true to make the playlist public, false for private (optional)" + }, + "description": { + "type": "string", + "description": "New description for the playlist (optional)" + } + }, + "required": ["playlist_id"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_PLAYLIST"}) + ), + types.Tool( + name="spotify_get_current_user_profile", + description="Get the current authenticated user's profile information", + inputSchema={ + "type": "object", + "properties": {}, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_USER", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_current_user_top_items", + description="Get the current user's top artists or tracks", + inputSchema={ + "type": "object", + "properties": { + "item_type": { + "type": "string", + "description": "Type of items to retrieve (artists or tracks)", + "enum": ["artists", "tracks"] + }, + "time_range": { + "type": "string", + "description": "Time range for top items (short_term, medium_term, long_term)", + "default": "medium_term" + }, + "limit": { + "type": "integer", + "description": "Number of items to return (default: 20, max: 50)", + "default": 20 + }, + "offset": { + "type": "integer", + "description": "Index of the first item to return (for pagination, default: 0)", + "default": 0 + } + }, + "required": ["item_type"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_USER", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_spotify_user_public_profile", + description="Get public profile information about a Spotify user by their user ID", + inputSchema={ + "type": "object", + "properties": { + "user_id": { + "type": "string", + "description": "Spotify User ID (username) to retrieve public profile for" + }, + }, + "required": ["user_id"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_USER", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_follow_playlist", + description="Follow a Spotify playlist", + inputSchema={ + "type": "object", + "properties": { + "playlist_id": { + "type": "string", + "description": "Spotify playlist ID to follow" + }, + "public": { + "type": "boolean", + "description": "Set to true to make the playlist public, false for private (optional)" + } + }, + "required": ["playlist_id"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_PLAYLIST"}) + ), + types.Tool( + name="spotify_unfollow_playlist", + description="Unfollow a Spotify playlist", + inputSchema={ + "type": "object", + "properties": { + "playlist_id": { + "type": "string", + "description": "Spotify playlist ID to unfollow" + }, + }, + "required": ["playlist_id"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_PLAYLIST"}) + ), + types.Tool( + name="spotify_get_current_user_followed_artists", + description="Get the current user's followed artists", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Max number of artists to return (default: 20, max: 50)", + "default": 20 + }, + "after": { + "type": "string", + "description": "Cursor to get the next page of results (optional)" + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_USER", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_follow_artists_or_users", + description="Follow one or more artists or Spotify users", + inputSchema={ + "type": "object", + "properties": { + "ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify artist/user IDs to follow" + }, + }, + "required": ["ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_USER"}) + ), + types.Tool( + name="spotify_unfollow_artists_or_users", + description="Unfollow one or more artists or Spotify users", + inputSchema={ + "type": "object", + "properties": { + "ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify artist/user IDs to unfollow" + }, + "type": { + "type": "string", + "description": "Type of IDs: 'artist' or 'user'", + "enum": ["artist", "user"], + "default": "artist" + } + }, + "required": ["ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_USER"}) + ), + types.Tool( + name="spotify_check_user_follows", + description="Check if the current user follows one or more artists or Spotify users", + inputSchema={ + "type": "object", + "properties": { + "ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify artist/user IDs to check" + }, + "type": { + "type": "string", + "description": "Type of IDs: 'artist' or 'user'", + "enum": ["artist", "user"], + "default": "artist" + } + }, + "required": ["ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_USER", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_add_items_to_playlist", + description="Add items (tracks, episodes) to a Spotify playlist", + inputSchema={ + "type": "object", + "properties": { + "playlist_id": { + "type": "string", + "description": "Spotify playlist ID to add items to" + }, + "uris": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify URIs (tracks, episodes) to add" + }, + "position": { + "type": "integer", + "description": "Position in the playlist to insert the items (optional)" + } + }, + "required": ["playlist_id", "uris"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_PLAYLIST"}) + ), + types.Tool( + name="spotify_remove_items_from_playlist", + description="Remove items (tracks, episodes) from a Spotify playlist", + inputSchema={ + "type": "object", + "properties": { + "playlist_id": { + "type": "string", + "description": "Spotify playlist ID to remove items from" + }, + "uris": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify URIs (tracks, episodes) to remove" + } + }, + "required": ["playlist_id", "uris"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_PLAYLIST"}) + ), + types.Tool( + name="spotify_get_current_user_playlists", + description="Get playlists created by the current authenticated user", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Max number of playlists to return (default: 20, max: 50)", + "default": 20 + }, + "offset": { + "type": "integer", + "description": "Index of the first playlist to return (for pagination, default: 0)", + "default": 0 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_PLAYLIST", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_multiple_shows", + description="Get detailed information about multiple podcast shows", + inputSchema={ + "type": "object", + "properties": { + "show_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify show IDs to retrieve information for" + }, + }, + "required": ["show_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_SHOW", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_show_episodes", + description="Get episodes of a specific podcast show", + inputSchema={ + "type": "object", + "properties": { + "show_id": { + "type": "string", + "description": "Spotify show ID to retrieve episodes for" + }, + "limit": { + "type": "integer", + "description": "Max number of episodes to return (default: 20, max: 50)", + "default": 20 + }, + "offset": { + "type": "integer", + "description": "Index of the first episode to return (for pagination, default: 0)", + "default": 0 + }, + "market": { + "type": "string", + "description": "Country market to restrict results (ISO 3166-1 alpha-2, default: None)", + "default": None + } + }, + "required": ["show_id"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_SHOW", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_get_current_user_saved_shows", + description="Get the current user's saved podcast shows", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Max number of shows to return (default: 20, max: 50)", + "default": 20 + }, + "offset": { + "type": "integer", + "description": "Index of the first show to return (for pagination, default: 0)", + "default": 0 + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_SHOW", "readOnlyHint": True}) + ), + types.Tool( + name="spotify_save_shows_to_user_library", + description="Save podcast shows to the user's library", + inputSchema={ + "type": "object", + "properties": { + "show_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify show IDs to save" + }, + }, + "required": ["show_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_SHOW"}) + ), + types.Tool( + name="spotify_remove_shows_from_user_library", + description="Remove podcast shows from the user's library", + inputSchema={ + "type": "object", + "properties": { + "show_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify show IDs to remove" + }, + }, + "required": ["show_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_SHOW"}) + ), + types.Tool( + name="spotify_check_user_saved_shows", + description="Check if a podcast show or multiple shows is saved in the user's library", + inputSchema={ + "type": "object", + "properties": { + "show_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of Spotify show IDs to check" + }, + }, + "required": ["show_ids"] + }, + annotations=types.ToolAnnotations(**{"category": "SPOTIFY_SHOW", "readOnlyHint": True}) + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + logger.info(f"Tool called: {name}") + logger.debug(f"Tool arguments: {json.dumps(arguments, indent=2)}") + + token = get_spotify_access_token() + sp = get_spotify_client() + sp_oauth, user_token = get_user_spotify_client() + + if name == "spotify_search_tracks": + query = arguments.get("query", "") + search_type = arguments.get("type", "") + limit = arguments.get("limit", 10) + logger.info(f"Searching tracks with query: {query}, type: {search_type}, limit: {limit}") + + if not query: + return [ + types.TextContent( + type="text", + text="Query parameter is required for search.", + ) + ] + + result = search_tracks(query, search_type, limit, sp) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_track_info": + track_ids = arguments.get("ids", "") + logger.info(f"Getting track info for track_id: {track_ids}") + + if not track_ids: + return [ + types.TextContent( + type="text", + text="ID parameter is required to get track information.", + ) + ] + + result = get_tracks_info(track_ids, sp) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_user_saved_tracks": + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + logger.info(f"Getting user saved tracks with limit: {limit}, offset: {offset}") + + result = get_user_saved_tracks(sp_oauth, limit, offset) + logger.info(f"User saved tracks result: {result}") + + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_check_user_saved_tracks": + track_ids = arguments.get("track_ids", []) + logger.info(f"Checking user saved tracks for IDs: {track_ids}") + + if not track_ids: + return [ + types.TextContent( + type="text", + text="track_ids parameter is required to check saved tracks.", + ) + ] + + result = check_user_saved_tracks(track_ids, sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_save_tracks_for_current_user": + track_ids = arguments.get("track_ids", []) + logger.info(f"Saving tracks for current user: {track_ids}") + + if not track_ids: + return [ + types.TextContent( + type="text", + text="track_ids parameter is required to save tracks.", + ) + ] + + result = save_tracks_for_current_user(track_ids, sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_remove_user_saved_tracks": + track_ids = arguments.get("track_ids", []) + logger.info(f"Removing tracks for current user: {track_ids}") + + if not track_ids: + return [ + types.TextContent( + type="text", + text="track_ids parameter is required to remove tracks.", + ) + ] + + result = remove_user_saved_tracks(track_ids, sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_albums_info": + album_ids = arguments.get("album_ids", []) + logger.info(f"Getting albums info for IDs: {album_ids}") + + if not album_ids: + return [ + types.TextContent( + type="text", + text="album_ids parameter is required to get album information.", + ) + ] + + result = get_albums_info(album_ids, sp) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_album_tracks": + album_id = arguments.get("album_id", "") + logger.info(f"Getting album tracks for album_id: {album_id}") + + if not album_id: + return [ + types.TextContent( + type="text", + text="album_id parameter is required to get album tracks.", + ) + ] + + result = get_album_tracks(album_id, sp) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_user_saved_albums": + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + logger.info(f"Getting user saved albums with limit: {limit}, offset: {offset}") + + result = get_user_saved_albums(sp_oauth, limit, offset) + logger.info(f"User saved albums result: {result}") + + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_save_albums_for_current_user": + album_ids = arguments.get("album_ids", []) + logger.info(f"Saving albums for current user: {album_ids}") + + if not album_ids: + return [ + types.TextContent( + type="text", + text="album_ids parameter is required to save albums.", + ) + ] + + result = save_albums_for_current_user(album_ids, sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + + elif name == "spotify_remove_albums_for_current_user": + album_ids = arguments.get("album_ids", []) + logger.info(f"Removing albums for current user: {album_ids}") + if not album_ids: + return [ + types.TextContent( + type="text", + text="album_ids parameter is required to remove albums.", + ) + ] + + result = remove_albums_for_current_user(album_ids, sp_oauth) + result = [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2) + ) + ] + + return result + elif name == "spotify_check_user_saved_albums": + album_ids = arguments.get("album_ids", []) + logger.info(f"Checking user saved albums for IDs: {album_ids}") + if not album_ids: + return [ + types.TextContent( + type="text", + text="album_ids parameter is required to check saved albums.", + ) + ] + + result = check_user_saved_albums(album_ids, sp_oauth) + result = [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2) + ) + ] + + return result + elif name == "spotify_get_artists_info": + artist_ids = arguments.get("artist_ids", []) + logger.info(f"Getting artists info for IDs: {artist_ids}") + if not artist_ids: + return [ + types.TextContent( + type="text", + text="artist_ids parameter is required to get artist information.", + ) + ] + result = get_artists_info(artist_ids, sp) + result = [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2) + ) + ] + + return result + elif name == "spotify_get_artist_albums": + artist_id = arguments.get("artist_id", "") + logger.info(f"Getting artist albums for artist_id: {artist_id}") + if not artist_id: + return [ + types.TextContent( + type="text", + text="artist_id parameter is required to get artist albums.", + ) + ] + result = get_artist_albums(artist_id, sp) + result = [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2) + ) + ] + + return result + elif name == "spotify_get_artist_top_tracks": + artist_id = arguments.get("artist_id", "") + country = arguments.get("country", None) + logger.info(f"Getting artist top tracks for artist_id: {artist_id}, country: {country}") + if not artist_id: + return [ + types.TextContent( + type="text", + text="artist_id parameter is required to get artist top tracks.", + ) + ] + result = get_artist_top_tracks(artist_id, sp, country) + result = [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2) + ) + ] + + return result + elif name == "spotify_get_episodes_info": + episode_ids = arguments.get("episode_ids", []) + market = arguments.get("market", None) + logger.info(f"Getting episodes info for IDs: {episode_ids}, market: {market}") + if not episode_ids: + return [ + types.TextContent( + type="text", + text="episode_ids parameter is required to get episode information.", + ) + ] + result = get_episodes_info(episode_ids, sp, market) + result = [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2) + ) + ] + + return result + elif name == "spotify_save_episodes_for_current_user": + episode_ids = arguments.get("episode_ids", []) + logger.info(f"Saving episodes for current user: {episode_ids}") + if not episode_ids: + return [ + types.TextContent( + type="text", + text="episode_ids parameter is required to save episodes.", + ) + ] + result = save_episodes_for_current_user(episode_ids, sp_oauth) + result = [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2) + ) + ] + return result + elif name == "spotify_get_user_saved_episodes": + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + logger.info(f"Getting user saved episodes with limit: {limit}, offset: {offset}") + + result = get_user_saved_episodes(sp_oauth, limit, offset) + logger.info(f"User saved episodes result: {result}") + result = [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2) + ) + ] + + return result + elif name == "spotify_remove_episodes_for_current_user": + episode_ids = arguments.get("episode_ids", []) + logger.info(f"Removing episodes for current user: {episode_ids}") + if not episode_ids: + return [ + types.TextContent( + type="text", + text="episode_ids parameter is required to remove episodes.", + ) + ] + + result = remove_episodes_for_current_user(episode_ids, sp_oauth) + result = [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2) + ) + ] + + return result + elif name == "spotify_check_user_saved_episodes": + episode_ids = arguments.get("episode_ids", []) + logger.info(f"Checking user saved episodes for IDs: {episode_ids}") + if not episode_ids: + return [ + types.TextContent( + type="text", + text="episode_ids parameter is required to check saved episodes.", + ) + ] + + result = check_user_saved_episodes(episode_ids, sp_oauth) + result = [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2) + ) + ] + + return result + elif name == "spotify_get_playlist_by_id": + playlist_id = arguments.get("playlist_id", "") + market = arguments.get("market", None) + logger.info(f"Getting playlist by ID: {playlist_id}, market: {market}") + if not playlist_id: + return [ + types.TextContent( + type="text", + text="playlist_id parameter is required to get playlist information.", + ) + ] + + result = get_playlist_by_id(playlist_id, sp, market) + result = [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2) + ) + ] + + return result + elif name == "spotify_get_user_owned_playlists": + user_id = arguments.get("user_id", "") + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + logger.info(f"Getting user owned playlists for user_id: {user_id}, limit: {limit}, offset: {offset}") + + if not user_id: + return [ + types.TextContent( + type="text", + text="user_id parameter is required to get owned playlists.", + ) + ] + + result = get_user_owned_playlists(user_id, sp, limit, offset) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_update_playlist_details": + playlist_id = arguments.get("playlist_id", "") + name = arguments.get("name", None) + public = arguments.get("public", None) + description = arguments.get("description", None) + logger.info( + f"Updating playlist details for playlist_id: {playlist_id}, " + f"name: {name}, public: {public}, description: {description}" + ) + + if not playlist_id: + return [ + types.TextContent( + type="text", + text="playlist_id parameter is required to update playlist details.", + ) + ] + + result = update_playlist_details(playlist_id, name, public, description, sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_current_user_profile": + logger.info("Getting current user profile") + + result = get_current_user_profile(sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_current_user_top_items": + item_type = arguments.get("item_type", "artists") + time_range = arguments.get("time_range", "medium_term") + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + logger.info( + f"Getting current user top items: type={item_type}, " + f"time_range={time_range}, limit={limit}, offset={offset}" + ) + + result = get_current_user_top_items(sp_oauth, item_type, time_range, limit, offset) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_spotify_user_public_profile": + user_id = arguments.get("user_id", "") + logger.info(f"Getting public profile for user_id: {user_id}") + + if not user_id: + return [ + types.TextContent( + type="text", + text="user_id parameter is required to get public profile.", + ) + ] + + result = get_spotify_user_public_profile(user_id, sp) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_follow_playlist": + playlist_id = arguments.get("playlist_id", "") + public = arguments.get("public", None) + logger.info(f"Following playlist with ID: {playlist_id}, public: {public}") + + if not playlist_id: + return [ + types.TextContent( + type="text", + text="playlist_id parameter is required to follow a playlist.", + ) + ] + + result = follow_playlist(playlist_id, public, sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_unfollow_playlist": + playlist_id = arguments.get("playlist_id", "") + logger.info(f"Unfollowing playlist with ID: {playlist_id}") + + if not playlist_id: + return [ + types.TextContent( + type="text", + text="playlist_id parameter is required to unfollow a playlist.", + ) + ] + + result = unfollow_playlist(playlist_id, sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_current_user_followed_artists": + limit = arguments.get("limit", 20) + after = arguments.get("after", None) + logger.info(f"Getting followed artists with limit: {limit}, after: {after}") + + result = get_current_user_followed_artists(sp_oauth, limit, after) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_follow_artists_or_users": + ids = arguments.get("ids", []) + logger.info(f"Following artists/users with IDs: {ids}") + + if not ids: + return [ + types.TextContent( + type="text", + text="ids parameter is required to follow artists or users.", + ) + ] + + result = follow_artists_or_users(ids, sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_unfollow_artists_or_users": + ids = arguments.get("ids", []) + type_ = arguments.get("type", "artist") # or "user" + logger.info(f"Unfollowing artists/users with IDs: {ids}") + + if not ids: + return [ + types.TextContent( + type="text", + text="ids parameter is required to unfollow artists or users.", + ) + ] + + result = unfollow_artists_or_users(ids, type_, sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_check_user_follows": + ids = arguments.get("ids", []) + type_ = arguments.get("type", "artist") # or "user" + logger.info(f"Checking if user follows artists/users with IDs: {ids}") + + if not ids: + return [ + types.TextContent( + type="text", + text="ids parameter is required to check follows.", + ) + ] + + result = check_user_follows(ids, type_, sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_add_items_to_playlist": + playlist_id = arguments.get("playlist_id", "") + uris = arguments.get("uris", []) + position = arguments.get("position", None) + logger.info(f"Adding items to playlist: {playlist_id}, uris: {uris}, position: {position}") + + if not playlist_id or not uris: + return [ + types.TextContent( + type="text", + text="playlist_id and uris parameters are required to add items to a playlist.", + ) + ] + + result = add_items_to_playlist(playlist_id, uris, sp_oauth, position) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_remove_items_from_playlist": + playlist_id = arguments.get("playlist_id", "") + uris = arguments.get("uris", []) + logger.info(f"Removing items from playlist: {playlist_id}, uris: {uris}") + + if not playlist_id or not uris: + return [ + types.TextContent( + type="text", + text="playlist_id and uris parameters are required to remove items from a playlist.", + ) + ] + + result = remove_items_from_playlist(playlist_id, uris, sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_current_user_playlists": + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + logger.info(f"Getting current user playlists with limit: {limit}, offset: {offset}") + + result = get_current_user_playlists(sp_oauth, limit, offset) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_multiple_shows": + show_ids = arguments.get("show_ids", []) + logger.info(f"Getting multiple shows for IDs: {show_ids}") + + if not show_ids: + return [ + types.TextContent( + type="text", + text="show_ids parameter is required to get multiple shows.", + ) + ] + + result = get_multiple_shows(show_ids, sp) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_show_episodes": + show_id = arguments.get("show_id", "") + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + market = arguments.get("market", "US") + logger.info( + f"Getting episodes for show_id: {show_id}, limit: {limit}, " + f"offset: {offset}, market: {market}" + ) + + if not show_id: + return [ + types.TextContent( + type="text", + text="show_id parameter is required to get show episodes.", + ) + ] + + result = get_show_episodes(show_id, sp, limit, offset, market) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + + elif name == "spotify_get_current_user_saved_shows": + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + logger.info(f"Getting current user saved shows with limit: {limit}, offset: {offset}") + + result = get_current_user_saved_shows(sp_oauth, limit, offset) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + elif name == "spotify_save_shows_to_user_library": + show_ids = arguments.get("show_ids", []) + logger.info(f"Saving shows to user library: {show_ids}") + + if not show_ids: + return [ + types.TextContent( + type="text", + text="show_ids parameter is required to save shows.", + ) + ] + + result = save_shows_to_user_library(show_ids, sp_oauth) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + elif name == "spotify_get_user_owned_playlists": + user_id = arguments.get("user_id", "") + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + logger.info( + f"Getting user owned playlists for user_id: {user_id}, " + f"limit: {limit}, offset: {offset}" + ) + + if not user_id: + return [ + types.TextContent( + type="text", + text="user_id parameter is required to get owned playlists.", + ) + ] + + result = get_user_owned_playlists(user_id, sp, limit, offset) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_update_playlist_details": + playlist_id = arguments.get("playlist_id", "") + name = arguments.get("name", None) + public = arguments.get("public", None) + description = arguments.get("description", None) + logger.info( + f"Updating playlist details for playlist_id: {playlist_id}, " + f"name: {name}, public: {public}, description: {description}" + ) + + if not playlist_id: + return [ + types.TextContent( + type="text", + text="playlist_id parameter is required to update playlist details.", + ) + ] + + result = update_playlist_details(playlist_id, name, public, description, sp_oauth) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_get_current_user_profile": + logger.info("Getting current user profile") + + result = get_current_user_profile(sp_oauth) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_get_current_user_top_items": + item_type = arguments.get("item_type", "artists") + time_range = arguments.get("time_range", "medium_term") + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + logger.info( + f"Getting current user top items: type={item_type}, " + f"time_range={time_range}, limit={limit}, offset={offset}" + ) + + result = get_current_user_top_items(sp_oauth, item_type, time_range, limit, offset) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_get_spotify_user_public_profile": + user_id = arguments.get("user_id", "") + logger.info(f"Getting public profile for user_id: {user_id}") + + if not user_id: + return [ + types.TextContent( + type="text", + text="user_id parameter is required to get public profile.", + ) + ] + + result = get_spotify_user_public_profile(user_id, sp) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_follow_playlist": + playlist_id = arguments.get("playlist_id", "") + public = arguments.get("public", None) + logger.info(f"Following playlist with ID: {playlist_id}, public: {public}") + + if not playlist_id: + return [ + types.TextContent( + type="text", + text="playlist_id parameter is required to follow a playlist.", + ) + ] + + result = follow_playlist(playlist_id, public, sp_oauth) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_unfollow_playlist": + playlist_id = arguments.get("playlist_id", "") + type_ = arguments.get("type", "artist") + logger.info(f"Unfollowing playlist with ID: {playlist_id}") + + if not playlist_id: + return [ + types.TextContent( + type="text", + text="playlist_id parameter is required to unfollow a playlist.", + ) + ] + + result = unfollow_playlist(playlist_id, type_, sp_oauth) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_get_current_user_followed_artists": + limit = arguments.get("limit", 20) + after = arguments.get("after", None) + logger.info(f"Getting followed artists with limit: {limit}, after: {after}") + + result = get_current_user_followed_artists(sp_oauth, limit, after) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_follow_artists_or_users": + ids = arguments.get("ids", []) + logger.info(f"Following artists/users with IDs: {ids}") + + if not ids: + return [ + types.TextContent( + type="text", + text="ids parameter is required to follow artists or users.", + ) + ] + + result = follow_artists_or_users(ids, sp_oauth) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_unfollow_artists_or_users": + ids = arguments.get("ids", []) + type_ = arguments.get("type", "artist") # or "user" + logger.info(f"Unfollowing artists/users with IDs: {ids}") + + if not ids: + return [ + types.TextContent( + type="text", + text="ids parameter is required to unfollow artists or users.", + ) + ] + + result = unfollow_artists_or_users(ids, type_, sp_oauth) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_check_user_follows": + ids = arguments.get("ids", []) + type_ = arguments.get("type", "artist") # or "user" + logger.info(f"Checking if user follows artists/users with IDs: {ids}") + + if not ids: + return [ + types.TextContent( + type="text", + text="ids parameter is required to check follows.", + ) + ] + + result = check_user_follows(ids, type_, sp_oauth) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_add_items_to_playlist": + playlist_id = arguments.get("playlist_id", "") + uris = arguments.get("uris", []) + position = arguments.get("position", None) + logger.info( + f"Adding items to playlist: {playlist_id}, uris: {uris}, position: {position}" + ) + + if not playlist_id or not uris: + return [ + types.TextContent( + type="text", + text="playlist_id and uris parameters are required to add items to a playlist.", + ) + ] + + result = add_items_to_playlist(playlist_id, uris, sp_oauth, position) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_remove_items_from_playlist": + playlist_id = arguments.get("playlist_id", "") + uris = arguments.get("uris", []) + logger.info(f"Removing items from playlist: {playlist_id}, uris: {uris}") + + if not playlist_id or not uris: + return [ + types.TextContent( + type="text", + text="playlist_id and uris parameters are required to remove items from a playlist.", + ) + ] + + result = remove_items_from_playlist(playlist_id, uris, sp_oauth) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_get_current_user_playlists": + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + logger.info(f"Getting current user playlists with limit: {limit}, offset: {offset}") + + result = get_current_user_playlists(sp_oauth, limit, offset) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_get_multiple_shows": + show_ids = arguments.get("show_ids", []) + logger.info(f"Getting multiple shows for IDs: {show_ids}") + + if not show_ids: + return [ + types.TextContent( + type="text", + text="show_ids parameter is required to get multiple shows.", + ) + ] + + result = get_multiple_shows(show_ids, sp) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_get_show_episodes": + show_id = arguments.get("show_id", "") + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + market = arguments.get("market", "US") + logger.info( + f"Getting episodes for show_id: {show_id}, limit: {limit}, " + f"offset: {offset}, market: {market}" + ) + + if not show_id: + return [ + types.TextContent( + type="text", + text="show_id parameter is required to get show episodes.", + ) + ] + + result = get_show_episodes(show_id, sp, limit, offset, market) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + + elif name == "spotify_get_current_user_saved_shows": + limit = arguments.get("limit", 20) + offset = arguments.get("offset", 0) + logger.info(f"Getting current user saved shows with limit: {limit}, offset: {offset}") + + result = get_current_user_saved_shows(sp_oauth, limit, offset) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + elif name == "spotify_remove_shows_from_user_library": + show_ids = arguments.get("show_ids", []) + logger.info(f"Removing shows from user library: {show_ids}") + + if not show_ids: + return [ + types.TextContent( + type="text", + text="show_ids parameter is required to remove shows.", + ) + ] + + result = remove_shows_from_user_library(show_ids, sp_oauth) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + elif name == "spotify_check_user_saved_shows": + show_ids = arguments.get("show_ids", []) + logger.info(f"Checking user saved shows for IDs: {show_ids}") + + if not show_ids: + return [ + types.TextContent( + type="text", + text="show_ids parameter is required to check saved shows.", + ) + ] + + result = check_user_saved_shows(show_ids, sp_oauth) + return [ + types.TextContent(type="text", text=json.dumps(result, indent=2)) + ] + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + + + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers (allow None - will be handled at tool level) + auth_token = request.headers.get("x-auth-token") + + # Set the auth token in context for this request (can be None) + token = auth_token_context.set(auth_token or "") + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http(scope: Scope, receive: Receive, send: Send) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers (allow None - will be handled at tool level) + headers = dict(scope.get("headers", [])) + auth_token = headers.get(b"x-auth-token") + if auth_token: + auth_token = auth_token.decode("utf-8") + + # Set the auth token in context for this request (can be None/empty) + token = auth_token_context.set(auth_token or "") + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/spotify/tools/__init__.py b/mcp_servers/spotify/tools/__init__.py new file mode 100644 index 00000000..cf23ff7f --- /dev/null +++ b/mcp_servers/spotify/tools/__init__.py @@ -0,0 +1,56 @@ +from .base import get_spotify_access_token, auth_token_context , get_spotify_client , get_user_spotify_client +from .search import search_tracks +from .tracks import get_tracks_info, get_user_saved_tracks , check_user_saved_tracks , save_tracks_for_current_user, remove_user_saved_tracks +from .albums import get_albums_info, get_album_tracks,get_user_saved_albums,save_albums_for_current_user,remove_albums_for_current_user,check_user_saved_albums +from .artists import get_artists_info, get_artist_albums,get_artist_top_tracks +from .episodes import get_episodes_info , save_episodes_for_current_user , get_user_saved_episodes,remove_episodes_for_current_user,check_user_saved_episodes +from .playlists import get_playlist_by_id,get_user_owned_playlists, update_playlist_details , add_items_to_playlist, remove_items_from_playlist , get_current_user_playlists +from .users import * +from .shows import * +__all__ = [ + 'auth_token_context', + 'get_spotify_access_token', + 'search_tracks', + 'get_tracks_info', + 'get_spotify_client', + 'get_user_spotify_client', + 'get_user_saved_tracks', + 'check_user_saved_tracks', + 'save_tracks_for_current_user', + 'remove_user_saved_tracks', + 'get_albums_info', + 'get_album_tracks', + 'get_user_saved_albums', + 'save_albums_for_current_user', + 'remove_albums_for_current_user', + 'check_user_saved_albums', + 'get_artists_info', + 'get_artist_albums', + 'get_artist_top_tracks', + 'get_episodes_info', + 'save_episodes_for_current_user', + 'get_user_saved_episodes', + 'remove_episodes_for_current_user', + 'check_user_saved_episodes', + 'get_playlist_by_id', + 'get_user_owned_playlists', + 'update_playlist_details', + 'get_current_user_profile', + 'get_current_user_top_items', + 'get_spotify_user_public_profile', + 'follow_playlist', + 'unfollow_playlist', + 'get_current_user_followed_artists', + 'follow_artists_or_users', + 'unfollow_artists_or_users', + 'check_user_follows', + 'add_items_to_playlist', + 'remove_items_from_playlist', + 'get_current_user_playlists', + 'get_multiple_shows', + 'get_show_episodes', + 'get_current_user_saved_shows', + 'save_shows_to_user_library', + 'remove_shows_from_user_library', + 'check_user_saved_shows', +] \ No newline at end of file diff --git a/mcp_servers/spotify/tools/albums.py b/mcp_servers/spotify/tools/albums.py new file mode 100644 index 00000000..9b68de0c --- /dev/null +++ b/mcp_servers/spotify/tools/albums.py @@ -0,0 +1,169 @@ +from spotipy import Spotify +from typing import List, Dict +from .base import get_spotify_client, get_user_spotify_client ,process_album_info, process_album_tracks + + + + + +def get_albums_info(album_ids: List[str], sp: Spotify = None) -> List[Dict]: + """ + Get detailed information about one or multiple albums. + + Parameters: + album_ids (list[str]): Album IDs to look up in Spotify's catalog. + sp (spotipy.Spotify, optional): Spotipy client to use. + + Returns: + list[dict]: Simplified album info for each requested album. + """ + try: + if not sp: + sp = get_spotify_client() + # Spotipy's albums() accepts up to 20 album IDs per call + results = sp.albums(albums=album_ids) + albums = results.get("albums", []) + cleaned_results = [process_album_info(album) for album in albums if album is not None] + return cleaned_results + except Exception as e: + print(f"An error occurred while getting album info: {e}") + return {"error": str(e)} + + + + + +def get_album_tracks( + album_id: str, + sp: Spotify = None, + limit: int = 50, + offset: int = 0 +) -> List[Dict]: + """ + Get catalog information for tracks in a Spotify album. + + Parameters: + album_id (str): Spotify Album ID. + sp (spotipy.Spotify, optional): Spotipy client to use. If None, creates with get_spotify_client(). + limit (int): Max number of tracks to return per page (default 50, max 50). + offset (int): Index of first track to return (for albums with >50 tracks). + + Returns: + list[dict]: List of dicts, each dict contains info about a track. + """ + try: + if not sp: + sp = get_spotify_client() + results = sp.album_tracks(album_id=album_id, limit=limit, offset=offset) + items = results.get("items", []) + return process_album_tracks(items) + except Exception as e: + print(f"An error occurred while getting album tracks: {e}") + return {"error": str(e)} + + +def get_user_saved_albums( + sp: Spotify = None, + limit: int = 20, + offset: int = 0 +) -> List[Dict]: + """ + Fetch the albums saved in the current Spotify user's library. + + Parameters: + sp (spotipy.Spotify, optional): Authenticated Spotipy client with 'user-library-read' scope. + limit (int): Number of albums to return (max 50 per request, default 20). + offset (int): Start index for pagination. + + Returns: + list[dict]: List of dicts with album metadata. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + results = sp.current_user_saved_albums(limit=limit, offset=offset) + items = results.get("items", []) + return [process_album_info(item.get("album")) for item in items if item is not None] + except Exception as e: + print(f"An error occurred while fetching user saved albums: {e}") + return {"error": str(e)} + + +def save_albums_for_current_user(album_ids: List[str], sp: Spotify = None) -> str: + """ + Save one or more albums to the current user's library ("Your Music"). + + Parameters: + album_ids (List[str]): List of Spotify album IDs to save. + sp (spotipy.Spotify, optional): Authenticated Spotify client with 'user-library-modify' scope. + + Returns: + str: "Success" on success, or error message on failure. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_IDS_PER_CALL = 50 + for i in range(0, len(album_ids), MAX_IDS_PER_CALL): + chunk = album_ids[i:i + MAX_IDS_PER_CALL] + sp.current_user_saved_albums_add(albums=chunk) + + return "Success" + except Exception as e: + print(f"An error occurred while saving albums: {e}") + return f"error: {str(e)}" + + +def remove_albums_for_current_user(album_ids: List[str], sp: Spotify = None) -> str: + """ + Remove one or more albums from the current user's library ("Your Music"). + + Parameters: + album_ids (List[str]): List of Spotify album IDs to remove. + sp (spotipy.Spotify, optional): Authenticated Spotify client with 'user-library-modify' scope. + + Returns: + str: "Success" on success, or error message on failure. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_IDS_PER_CALL = 50 + for i in range(0, len(album_ids), MAX_IDS_PER_CALL): + chunk = album_ids[i:i + MAX_IDS_PER_CALL] + sp.current_user_saved_albums_delete(albums=chunk) + + return "Success" + except Exception as e: + print(f"An error occurred while removing albums: {e}") + return f"error: {str(e)}" + + +def check_user_saved_albums(album_ids: List[str], sp: Spotify = None) -> List[bool] | Dict: + """ + Check if one or more albums are saved in the current user's Spotify library. + + Parameters: + album_ids (List[str]): List of Spotify album IDs to check (max 50 per call). + sp (spotipy.Spotify, optional): Authenticated Spotipy client with 'user-library-read' scope. + + Returns: + list[bool]: Saved status for each album ID. + dict: Error message if an exception occurs. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_IDS_PER_CALL = 50 + saved_statuses = [] + for i in range(0, len(album_ids), MAX_IDS_PER_CALL): + chunk = album_ids[i:i + MAX_IDS_PER_CALL] + result = sp.current_user_saved_albums_contains(albums=chunk) + saved_statuses.extend(result) + return saved_statuses + except Exception as e: + print(f"An error occurred while checking saved albums: {e}") + return {"error": str(e)} diff --git a/mcp_servers/spotify/tools/artists.py b/mcp_servers/spotify/tools/artists.py new file mode 100644 index 00000000..bbe1c121 --- /dev/null +++ b/mcp_servers/spotify/tools/artists.py @@ -0,0 +1,100 @@ +from spotipy import Spotify +from typing import List, Dict +from .base import get_spotify_client , process_artists_info , process_artist_albums , process_artist_top_tracks + + + + + + +def get_artists_info(artist_ids: List[str], sp: Spotify = None) -> List[Dict]: + """ + Get catalog information for multiple artists by their Spotify IDs. + + Parameters: + artist_ids (List[str]): Spotify artist IDs (max 50 per call). + sp (Spotify, optional): Spotipy client instance. If None, creates one. + + Returns: + List[dict]: Processed artist information. + """ + try: + if not sp: + sp = get_spotify_client() + + # Spotify's `artists()` method accepts up to 50 IDs per request + results = sp.artists(artist_ids) + artists = results.get("artists", []) + return process_artists_info(artists) + except Exception as e: + print(f"An error occurred while getting artist info: {e}") + return {"error": str(e)} + + +def get_artist_albums( + artist_id: str, + sp: Spotify = None, + include_groups: str = None, + limit: int = 20, + offset: int = 0, + market: str = None +) -> List[Dict]: + """ + Retrieve information about an artist's albums. + + Parameters: + artist_id (str): Spotify Artist ID or URI. + sp (Spotify, optional): Spotipy client instance. + include_groups (str, optional): One or more of 'album', 'single', 'compilation', 'appears_on'. + limit (int): Number of albums per request (max 50). + offset (int): Index of the first album to return. + market (str, optional): ISO 3166-1 alpha-2 country code for market filtering. + + Returns: + List[dict]: Processed list of album metadata. + """ + try: + if not sp: + sp = get_spotify_client() + + results = sp.artist_albums( + artist_id=artist_id, + album_type=include_groups, # Maps to Spotify API's 'include_groups' + limit=limit, + offset=offset, + country=market + ) + albums = results.get("items", []) + return process_artist_albums(albums) + except Exception as e: + print(f"An error occurred while fetching artist albums: {e}") + return {"error": str(e)} + + + +def get_artist_top_tracks( + artist_id: str, + sp: Spotify = None, + country: str = None +) -> List[Dict]: + """ + Get an artist's top tracks by country. + + Parameters: + artist_id (str): Spotify artist ID. + sp (Spotify, optional): Spotipy client instance. + country (str, optional): Two-letter country code (e.g., 'US', 'GB', 'IN'). + + Returns: + List[dict]: Processed list of top track metadata. + """ + try: + if not sp: + sp = get_spotify_client() + + results = sp.artist_top_tracks(artist_id, country=country) + tracks = results.get("tracks", []) + return process_artist_top_tracks(tracks) + except Exception as e: + print(f"An error occurred while fetching artist top tracks: {e}") + return {"error": str(e)} diff --git a/mcp_servers/spotify/tools/base.py b/mcp_servers/spotify/tools/base.py new file mode 100644 index 00000000..b20b697a --- /dev/null +++ b/mcp_servers/spotify/tools/base.py @@ -0,0 +1,520 @@ +import os +from contextvars import ContextVar +from typing import Dict, Tuple, Any ,List, Optional , Union + +import spotipy +from spotipy import Spotify +from spotipy.oauth2 import SpotifyClientCredentials, SpotifyOAuth + +# Context-var cache for the access token (per async context) +auth_token_context: ContextVar[str] = ContextVar("auth_token", default="") + +# Scopes required by the app (trimmed and readable) +AUTHORIZATION_SCOPE = ( + "user-library-read " + "playlist-read-private playlist-read-collaborative " + "playlist-modify-private playlist-modify-public " + "user-follow-modify user-follow-read " + "user-read-playback-position user-top-read user-read-recently-played " + "user-library-modify" +) + +# NOTE: Must be registered in the Spotify Developer Dashboard for your app +DEFAULT_REDIRECT_URI = "https://www.google.com/" + + +def get_user_spotify_client() -> Tuple[Spotify, Dict[str, Any]]: + """ + Create a user-authenticated Spotipy client (Authorization Code flow). + + On first run (or when no valid cached token exists), this will open a browser + window for the user to log in and grant permissions. + + Returns: + Tuple[Spotify, Dict[str, Any]]: (Spotipy client, token info dict) + """ + client_id = os.getenv("SPOTIFY_CLIENT_ID") + client_secret = os.getenv("SPOTIFY_CLIENT_SECRET") + redirect_uri = os.getenv("SPOTIFY_REDIRECT_URI", DEFAULT_REDIRECT_URI) + + sp_oauth = SpotifyOAuth( + client_id=client_id, + client_secret=client_secret, + redirect_uri=redirect_uri, + scope=AUTHORIZATION_SCOPE, + cache_path=".webassets-cache", + show_dialog=True, + ) + + token_info = sp_oauth.get_cached_token() + if not token_info or not sp_oauth.validate_token(token_info): + # Opens browser for user login/consent if needed + token_info = sp_oauth.get_access_token(as_dict=True) + + sp = spotipy.Spotify(auth=token_info["access_token"]) + return sp, token_info + + +def get_spotify_client() -> Spotify: + """ + Create an app-level Spotipy client (Client Credentials flow). + + Uses SPOTIFY_CLIENT_ID and SPOTIFY_CLIENT_SECRET environment variables. + + Returns: + Spotify: Spotipy client authenticated with client credentials. + + Raises: + ValueError: If required environment variables are missing. + """ + client_id = os.getenv("SPOTIFY_CLIENT_ID") + client_secret = os.getenv("SPOTIFY_CLIENT_SECRET") + + if not client_id or not client_secret: + raise ValueError( + "Spotify client ID/secret not found. " + "Set SPOTIFY_CLIENT_ID and SPOTIFY_CLIENT_SECRET." + ) + + credentials_manager = SpotifyClientCredentials( + client_id=client_id, + client_secret=client_secret, + ) + return Spotify(client_credentials_manager=credentials_manager) + + +def get_spotify_access_token() -> str: + """ + Retrieve a (bearer) access token and cache it in a ContextVar. + + For client-credentials usage only. If a token is already cached in the + current context, it will be returned directly. + + Returns: + str: Raw access token string. + """ + token = auth_token_context.get() + if token: + return token + + spotify = get_spotify_client() + # Access underlying auth manager to fetch the raw token string + access_token = spotify._auth_manager.get_access_token(as_dict=False) # noqa: SLF001 + auth_token_context.set(access_token) + return access_token + +def process_album_info(album_info): + """Process album information to extract relevant details.""" + if not album_info: + return None + return { + "album_id": album_info.get("id"), + "name": album_info.get("name"), + "artists": [artist.get("name") for artist in album_info.get("artists", [])], + "release_date": album_info.get("release_date"), + "total_tracks": album_info.get("total_tracks"), + "album_type": album_info.get("album_type"), + "genres": album_info.get("genres", []), + "label": album_info.get("label"), + "popularity": album_info.get("popularity"), + "external_url": album_info.get("external_urls", {}).get("spotify"), + } + +def process_album_tracks(tracks): + """Process album's track items to extract relevant details.""" + output = [] + for track in tracks: + output.append({ + "track_id": track.get("id"), + "name": track.get("name"), + "artists": [artist.get("name") for artist in track.get("artists", [])], + "duration_ms": track.get("duration_ms"), + "track_number": track.get("track_number"), + "disc_number": track.get("disc_number"), + "explicit": track.get("explicit"), + "preview_url": track.get("preview_url"), + "external_url": track.get("external_urls", {}).get("spotify"), + }) + return output + +def process_artists_info(artists: List[dict]) -> List[dict]: + """ + Process a list of Spotify artist objects to extract key metadata. + + Parameters: + artists (List[dict]): Raw artist data from Spotify API. + + Returns: + List[dict]: Processed list of artist metadata. + """ + output = [] + for artist in artists: + output.append({ + "artist_id": artist.get("id"), + "name": artist.get("name"), + "genres": artist.get("genres", []), + "followers": artist.get("followers", {}).get("total"), + "popularity": artist.get("popularity"), + "images": artist.get("images", []), # List of image dicts: {url, width, height} + "external_url": artist.get("external_urls", {}).get("spotify"), + "type": artist.get("type"), + "uri": artist.get("uri"), + }) + return output + +def process_artist_albums(albums: List[dict]) -> List[dict]: + """ + Process a list of Spotify album objects to extract key metadata. + + Parameters: + albums (List[dict]): Raw album data from Spotify API. + + Returns: + List[dict]: Processed album metadata. + """ + output = [] + for album in albums: + output.append({ + "album_id": album.get("id"), + "name": album.get("name"), + "album_type": album.get("album_type"), + "release_date": album.get("release_date"), + "total_tracks": album.get("total_tracks"), + "artists": [artist["name"] for artist in album.get("artists", [])], + "images": album.get("images", []), # Album cover images + "external_url": album.get("external_urls", {}).get("spotify"), + "available_markets": album.get("available_markets", []), + }) + return output + + +def process_artist_top_tracks(tracks: List[dict]) -> List[dict]: + """ + Process a list of Spotify track objects to extract top track metadata. + + Parameters: + tracks (List[dict]): Raw track data from Spotify API. + + Returns: + List[dict]: Processed track metadata. + """ + output = [] + for track in tracks: + output.append({ + "track_id": track.get("id"), + "name": track.get("name"), + "artists": [artist["name"] for artist in track.get("artists", [])], + "album": track.get("album", {}).get("name"), + "album_id": track.get("album", {}).get("id"), + "duration_ms": track.get("duration_ms"), + "popularity": track.get("popularity"), + "explicit": track.get("explicit"), + "preview_url": track.get("preview_url"), + "external_url": track.get("external_urls", {}).get("spotify"), + }) + return output + +def process_episode_info(episode: dict) -> dict: + """ + Extract relevant metadata from a Spotify episode object. + + Parameters: + episode (dict): Raw episode data from Spotify API. + + Returns: + dict: Processed episode metadata, or None if input is empty. + """ + if not episode: + return None + return { + "episode_id": episode.get("id"), + "name": episode.get("name"), + "description": episode.get("description"), + "show_name": episode.get("show", {}).get("name"), + "show_id": episode.get("show", {}).get("id"), + "release_date": episode.get("release_date"), + "duration_ms": episode.get("duration_ms"), + "explicit": episode.get("explicit"), + "languages": episode.get("languages", []), + "audio_preview_url": episode.get("audio_preview_url"), + "external_url": episode.get("external_urls", {}).get("spotify"), + "images": episode.get("images", []), # Episode cover art + "is_externally_hosted": episode.get("is_externally_hosted"), + "type": episode.get("type"), + "uri": episode.get("uri"), + } + +def process_playlists(playlists: List[Dict]) -> List[Dict]: + """ + Extract relevant info from a list of Spotify playlists. + + Parameters: + playlists (List[dict]): Raw Spotify playlist objects. + + Returns: + List[dict]: Simplified playlist metadata. + """ + processed = [] + for pl in playlists: + processed.append({ + "id": pl.get("id"), + "name": pl.get("name"), + "description": pl.get("description", ""), + "owner_id": pl.get("owner", {}).get("id"), + "owner_name": pl.get("owner", {}).get("display_name"), + "public": pl.get("public"), + "collaborative": pl.get("collaborative"), + "tracks_total": pl.get("tracks", {}).get("total"), + "images": pl.get("images", []), + "external_url": pl.get("external_urls", {}).get("spotify"), + "uri": pl.get("uri"), + "href": pl.get("href"), + "snapshot_id": pl.get("snapshot_id"), + }) + return processed + +def process_spotify_items(items: List[dict], type: str) -> List[dict]: + """ + Process Spotify API items and extract relevant fields based on type. + + Parameters: + items (list[dict]): List of Spotify API item dictionaries. + type (str): One of 'track', 'album', 'artist', 'episode', + 'show', 'playlist', or 'audiobook'. + + Returns: + list[dict]: List of simplified dictionaries with relevant metadata + for each item. + """ + output = [] + if not items: + return output + + for item in items: + if not item: + continue + + if type == "track": + output.append({ + "track_id": item.get("id"), + "name": item.get("name"), + "artists": [artist.get("name") for artist in item.get("artists", [])], + "album": item.get("album", {}).get("name"), + "duration_ms": item.get("duration_ms"), + "popularity": item.get("popularity"), + "explicit": item.get("explicit"), + "external_url": item.get("external_urls", {}).get("spotify"), + }) + + elif type == "album": + output.append({ + "album_id": item.get("id"), + "name": item.get("name"), + "artists": [artist.get("name") for artist in item.get("artists", [])], + "release_date": item.get("release_date"), + "total_tracks": item.get("total_tracks"), + "external_url": item.get("external_urls", {}).get("spotify"), + }) + + elif type == "artist": + output.append({ + "artist_id": item.get("id"), + "name": item.get("name"), + "genres": item.get("genres"), + "popularity": item.get("popularity"), + "external_url": item.get("external_urls", {}).get("spotify"), + }) + + elif type == "episode": + output.append({ + "episode_id": item.get("id"), + "name": item.get("name"), + "release_date": item.get("release_date"), + "duration_ms": item.get("duration_ms"), + "show_name": item.get("show", {}).get("name"), + "explicit": item.get("explicit"), + "description": item.get("description"), + "external_url": item.get("external_urls", {}).get("spotify"), + }) + + elif type == "show": + output.append({ + "show_id": item.get("id"), + "name": item.get("name"), + "publisher": item.get("publisher"), + "total_episodes": item.get("total_episodes"), + "description": item.get("description"), + "languages": item.get("languages"), + "explicit": item.get("explicit"), + "external_url": item.get("external_urls", {}).get("spotify"), + }) + + elif type == "playlist": + output.append({ + "playlist_id": item.get("id"), + "name": item.get("name"), + "owner": item.get("owner", {}).get("display_name"), + "tracks_count": item.get("tracks", {}).get("total"), + "description": item.get("description"), + "public": item.get("public"), + "external_url": item.get("external_urls", {}).get("spotify"), + }) + + elif type == "audiobook": + output.append({ + "audiobook_id": item.get("id"), + "name": item.get("name"), + "authors": [author.get("name") for author in item.get("authors", [])] if item.get("authors") else [], + "narrators": [narrator.get("name") for narrator in item.get("narrators", [])] if item.get("narrators") else [], + "release_date": item.get("release_date"), + "publisher": item.get("publisher"), + "description": item.get("description"), + "external_url": item.get("external_urls", {}).get("spotify"), + }) + + else: + # Fallback: minimal fields + output.append({ + "id": item.get("id"), + "name": item.get("name"), + "external_url": item.get("external_urls", {}).get("spotify"), + }) + + return output + +def process_shows(shows: List[Dict]) -> List[Dict]: + """ + Extract relevant information from a list of Spotify show objects. + + Parameters: + shows (List[Dict]): Raw show objects from Spotify API. + + Returns: + List[Dict]: Simplified show information. + """ + processed = [] + for show in shows or []: + processed.append({ + "id": show.get("id"), + "name": show.get("name"), + "publisher": show.get("publisher"), + "description": show.get("description", ""), + "languages": show.get("languages", []), + "media_type": show.get("media_type"), + "explicit": show.get("explicit"), + "total_episodes": show.get("total_episodes"), + "images": show.get("images", []), # Cover art + "external_url": show.get("external_urls", {}).get("spotify"), + "uri": show.get("uri"), + "type": show.get("type"), + "href": show.get("href"), + "is_externally_hosted": show.get("is_externally_hosted", False), + }) + return processed + +def process_episodes(episodes: List[Dict]) -> List[Dict]: + """ + Extract relevant information from a list of Spotify episode objects. + + Parameters: + episodes (List[Dict]): Raw episode objects from Spotify API. + + Returns: + List[Dict]: Simplified episode information. + """ + processed = [] + for ep in episodes or []: + if ep is None: + continue # Skip invalid entries + processed.append({ + "id": ep.get("id"), + "name": ep.get("name"), + "description": ep.get("description", ""), + "show_name": ep.get("show", {}).get("name"), + "show_id": ep.get("show", {}).get("id"), + "release_date": ep.get("release_date"), + "duration_ms": ep.get("duration_ms"), + "explicit": ep.get("explicit"), + "languages": ep.get("languages", []), + "audio_preview_url": ep.get("audio_preview_url"), + "external_url": ep.get("external_urls", {}).get("spotify"), + "images": ep.get("images", []), + "is_externally_hosted": ep.get("is_externally_hosted"), + "type": ep.get("type"), + "uri": ep.get("uri"), + }) + return processed + + +def process_saved_shows(input_data: List[Dict]) -> List[Dict]: + """ + Process a list of saved shows, extracting relevant information. + + Parameters: + input_data (List[Dict]): Each item has 'added_at' and a 'show' object. + + Returns: + List[Dict]: Simplified saved-show information with 'added_at'. + """ + processed = [] + for item in input_data or []: + show = item.get("show", {}) or {} + processed.append({ + "added_at": item.get("added_at"), + "id": show.get("id"), + "name": show.get("name"), + "publisher": show.get("publisher"), + "description": show.get("description", ""), + "languages": show.get("languages", []), + "media_type": show.get("media_type"), + "explicit": show.get("explicit"), + "total_episodes": show.get("total_episodes"), + "images": show.get("images", []), + "external_url": show.get("external_urls", {}).get("spotify"), + "uri": show.get("uri"), + "type": show.get("type"), + "href": show.get("href"), + "is_externally_hosted": show.get("is_externally_hosted", False), + "available_markets": show.get("available_markets", []), + }) + return processed + +def process_track_info(track_info: Dict) -> Optional[Dict]: + """ + Extract relevant details from a Spotify track object. + + Parameters: + track_info (dict): Raw track data from Spotify API. + + Returns: + dict | None: Simplified track info, or None if input is falsy. + """ + if not track_info: + return None + + return { + "track_id": track_info.get("id"), + "name": track_info.get("name"), + "artists": [a.get("name") for a in track_info.get("artists", [])], + "album": track_info.get("album", {}).get("name"), + "release_date": track_info.get("album", {}).get("release_date"), + "duration_ms": track_info.get("duration_ms"), + "popularity": track_info.get("popularity"), + "track_number": track_info.get("track_number"), + "external_url": track_info.get("external_urls", {}).get("spotify"), + } + + +def process_top_artist_info(artist: Dict) -> Dict: + """Extract relevant info from a Spotify artist object.""" + return { + "id": artist.get("id"), + "name": artist.get("name"), + "genres": artist.get("genres", []), + "popularity": artist.get("popularity"), + "followers": artist.get("followers", {}).get("total"), + "external_url": artist.get("external_urls", {}).get("spotify"), + "images": artist.get("images", []), + "uri": artist.get("uri"), + "type": artist.get("type"), + } \ No newline at end of file diff --git a/mcp_servers/spotify/tools/episodes.py b/mcp_servers/spotify/tools/episodes.py new file mode 100644 index 00000000..3cd9b983 --- /dev/null +++ b/mcp_servers/spotify/tools/episodes.py @@ -0,0 +1,160 @@ +from typing import List, Dict +from spotipy import Spotify +from .base import get_spotify_client, get_user_spotify_client , process_episode_info + + + + + +def get_episodes_info( + episode_ids: List[str], + sp: Spotify = None, + market: str = None +) -> List[Dict]: + """ + Get catalog information for multiple episodes. + + Parameters: + episode_ids (List[str]): Spotify episode IDs (max 50 per request). + sp (Spotify, optional): Spotipy client. If None, uses get_spotify_client(). + market (str, optional): ISO 3166-1 alpha-2 country code (e.g., 'US'). + + Returns: + List[dict]: Processed episode metadata. + """ + try: + if not sp: + sp = get_spotify_client() + + episodes_result = sp.episodes(episode_ids, market=market) + episodes = episodes_result.get("episodes", []) + return [process_episode_info(ep) for ep in episodes if ep] + except Exception as e: + print(f"An error occurred while fetching episode info: {e}") + return {"error": str(e)} + + +def save_episodes_for_current_user( + episode_ids: List[str], + sp: Spotify = None +) -> str: + """ + Save one or more episodes to the current user's library. + + Parameters: + episode_ids (List[str]): Spotify episode IDs to save. + sp (Spotify, optional): Authenticated client with 'user-library-modify' scope. + + Returns: + str: "Success" on success, or error message string on failure. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_IDS_PER_CALL = 50 + for i in range(0, len(episode_ids), MAX_IDS_PER_CALL): + chunk = episode_ids[i:i + MAX_IDS_PER_CALL] + sp.current_user_saved_episodes_add(episodes=chunk) + + return "Success" + except Exception as e: + print(f"An error occurred while saving episodes: {e}") + return f"error: {str(e)}" + + +def get_user_saved_episodes( + sp: Spotify = None, + limit: int = 20, + offset: int = 0 +) -> List[Dict]: + """ + Fetch episodes saved in the current user's library. + + Parameters: + sp (Spotify, optional): Authenticated client with 'user-library-read' scope. + limit (int): Number of episodes per request (max 50). + offset (int): Index of the first episode to return. + + Returns: + List[dict]: Processed episode metadata with 'added_at' field. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + results = sp.current_user_saved_episodes(limit=limit, offset=offset) + items = results.get("items", []) + + processed_episodes = [] + for item in items: + episode = item.get("episode") + if episode: + processed = process_episode_info(episode) + processed["added_at"] = item.get("added_at") # Timestamp when saved + processed_episodes.append(processed) + + return processed_episodes + except Exception as e: + print(f"An error occurred while fetching user saved episodes: {e}") + return {"error": str(e)} + + +def remove_episodes_for_current_user( + episode_ids: List[str], + sp: Spotify = None +) -> str: + """ + Remove one or more episodes from the current user's library. + + Parameters: + episode_ids (List[str]): Spotify episode IDs to remove. + sp (Spotify, optional): Authenticated client with 'user-library-modify' scope. + + Returns: + str: "Success" on success, or error message string on failure. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_IDS_PER_CALL = 50 + for i in range(0, len(episode_ids), MAX_IDS_PER_CALL): + chunk = episode_ids[i:i + MAX_IDS_PER_CALL] + sp.current_user_saved_episodes_delete(episodes=chunk) + + return "Success" + except Exception as e: + print(f"An error occurred while removing episodes: {e}") + return f"error: {str(e)}" + + +def check_user_saved_episodes( + episode_ids: List[str], + sp: Spotify = None +) -> List[bool] | dict: + """ + Check if one or more episodes are saved in the user's library. + + Parameters: + episode_ids (List[str]): Spotify episode IDs to check (max 50 per call). + sp (Spotify, optional): Authenticated client with 'user-library-read' scope. + + Returns: + List[bool]: True/False for each episode ID in order. + dict: Error message if something goes wrong. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_IDS_PER_CALL = 50 + saved_statuses = [] + for i in range(0, len(episode_ids), MAX_IDS_PER_CALL): + chunk = episode_ids[i:i + MAX_IDS_PER_CALL] + result = sp.current_user_saved_episodes_contains(episodes=chunk) + saved_statuses.extend(result) + return saved_statuses + except Exception as e: + print(f"An error occurred while checking saved episodes: {e}") + return {"error": str(e)} diff --git a/mcp_servers/spotify/tools/playlists.py b/mcp_servers/spotify/tools/playlists.py new file mode 100644 index 00000000..50b5f3e4 --- /dev/null +++ b/mcp_servers/spotify/tools/playlists.py @@ -0,0 +1,195 @@ +from typing import List, Optional, Dict +from spotipy import Spotify +from .base import get_user_spotify_client, get_spotify_client , process_playlists + + +def get_playlist_by_id( + playlist_id: str, + sp: Spotify = None, + market: str = None +) -> dict: + """ + Get a Spotify playlist's full metadata and contents by its Spotify ID. + + Parameters: + playlist_id (str): Spotify playlist ID. + sp (Spotify, optional): Authenticated Spotipy client. + Required 'playlist-read-private' for private playlists. + market (str, optional): ISO 3166-1 alpha-2 country code to filter track availability. + + Returns: + dict: Full playlist object from Spotify. + """ + try: + if not sp: + sp = get_spotify_client() + return sp.playlist(playlist_id, market=market) + except Exception as e: + print(f"Error fetching playlist: {e}") + return {"error": str(e)} + + +def get_user_owned_playlists( + user_id: str, + sp: Spotify = None, + limit: int = 20, + offset: int = 0 +) -> list: + """ + Get playlists owned by a specific Spotify user. + + Parameters: + user_id (str): Spotify user ID. + sp (Spotify, optional): Authenticated Spotipy client. + limit (int): Playlists per request (max 50). + offset (int): Pagination offset. + + Returns: + list: Playlist dictionaries owned by the given user. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + playlists = sp.user_playlists(user=user_id, limit=limit, offset=offset).get("items", []) + return process_playlists(playlists) + except Exception as e: + print(f"Error fetching user playlists: {e}") + return {"error": str(e)} + + +def get_current_user_playlists( + sp: Spotify = None, + limit: int = 20, + offset: int = 0 +) -> List[Dict]: + """ + Get playlists owned or followed by the current Spotify user. + + Parameters: + sp (Spotify, optional): Authenticated Spotipy client. + Requires 'playlist-read-private' and/or 'playlist-read-collaborative' scopes. + limit (int): Max number of playlists to return (1-50). + offset (int): Pagination offset. + + Returns: + List[dict]: Simplified playlist info. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + playlists_response = sp.current_user_playlists(limit=limit, offset=offset) + playlists = playlists_response.get("items", []) + return process_playlists(playlists) + except Exception as e: + print(f"Error fetching current user's playlists: {e}") + return {"error": str(e)} + + +def update_playlist_details( + playlist_id: str, + name: str = None, + public: bool = None, + description: str = None, + sp: Spotify = None +) -> str: + """ + Update a playlist's name, description, or visibility. + Must be called by the playlist owner. + + Parameters: + playlist_id (str): Spotify playlist ID. + name (str, optional): New playlist name. + public (bool, optional): True for public, False for private. + description (str, optional): Playlist description. + sp (Spotify, optional): Authenticated Spotipy client. + + Returns: + str: "success" or error message. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + sp.playlist_change_details( + playlist_id=playlist_id, + name=name, + public=public, + description=description, + ) + return "success" + except Exception as e: + print(f"Error updating playlist: {e}") + return f"error: {str(e)}" + + +def add_items_to_playlist( + playlist_id: str, + item_uris: List[str], + sp: Optional[Spotify] = None, + position: Optional[int] = None +) -> dict: + """ + Add one or more tracks or episodes to a playlist. + + Parameters: + playlist_id (str): Spotify playlist ID. + item_uris (List[str]): Spotify URIs or IDs of items to add. + sp (Spotify, optional): Authenticated Spotipy client. + position (int, optional): Position to insert items. Appends if omitted. + + Returns: + dict: Spotify API response (contains 'snapshot_id') or error. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_ITEMS_PER_REQUEST = 100 + results = {"snapshot_id": None} + + for i in range(0, len(item_uris), MAX_ITEMS_PER_REQUEST): + chunk = item_uris[i:i + MAX_ITEMS_PER_REQUEST] + results = sp.playlist_add_items( + playlist_id=playlist_id, + items=chunk, + position=position + ) + + return results + except Exception as e: + print(f"Error adding items to playlist: {e}") + return {"error": str(e)} + + +def remove_items_from_playlist( + playlist_id: str, + item_uris: List[str], + sp: Spotify = None, + snapshot_id: str = None +) -> Dict: + """ + Remove one or more tracks or episodes from a playlist. + + Parameters: + playlist_id (str): Spotify playlist ID. + item_uris (List[str]): Spotify URIs of items to remove. + sp (Spotify, optional): Authenticated Spotipy client. + snapshot_id (str, optional): Snapshot ID for concurrency control. + + Returns: + dict: Spotify API response (contains 'snapshot_id') or error. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + return sp.playlist_remove_all_occurrences_of_items( + playlist_id=playlist_id, + items=item_uris, + snapshot_id=snapshot_id + ) + except Exception as e: + print(f"Error removing items from playlist: {e}") + return {"error": f"{str(e)} - {playlist_id} - {item_uris}"} \ No newline at end of file diff --git a/mcp_servers/spotify/tools/search.py b/mcp_servers/spotify/tools/search.py new file mode 100644 index 00000000..6d057bb3 --- /dev/null +++ b/mcp_servers/spotify/tools/search.py @@ -0,0 +1,34 @@ +from typing import List, Dict, Optional +from spotipy import Spotify +from .base import get_spotify_client , process_spotify_items + + +def search_tracks( + query: str, + type: str = "track", + limit: int = 10, + sp: Optional[Spotify] = None +) -> List[Dict]: + """ + Search Spotify for items of the given type and return simplified metadata. + + Parameters: + query (str): Search query string. + type (str): Spotify item type (e.g., 'track', 'album', 'artist'). + limit (int): Max number of items to return (default: 10). + sp (Spotify, optional): Spotipy client. If None, a new one is created. + + Returns: + list[dict]: Processed search results or an error dictionary. + """ + try: + if not sp: + sp = get_spotify_client() + + results = sp.search(q=query, type=type, limit=limit) + items = results.get(f"{type}s", {}).get("items", []) + return process_spotify_items(items, type) + + except Exception as e: + print(f"An error occurred while searching for {type}s: {e}") + return {"error": str(e)} \ No newline at end of file diff --git a/mcp_servers/spotify/tools/shows.py b/mcp_servers/spotify/tools/shows.py new file mode 100644 index 00000000..82ea7b1f --- /dev/null +++ b/mcp_servers/spotify/tools/shows.py @@ -0,0 +1,191 @@ +from typing import List, Dict, Optional +from spotipy import Spotify +from .base import get_spotify_client, get_user_spotify_client , process_shows , process_episodes , process_saved_shows + + + +def get_multiple_shows( + show_ids: List[str], + sp: Optional[Spotify] = None, + market: str = "US" +) -> List[Dict]: + """ + Get Spotify catalog information for several shows by their Spotify IDs. + + Parameters: + show_ids (List[str]): Spotify show IDs (max 50 per call). + sp (Spotify, optional): Spotipy client. If None, uses client-credentials. + market (str): ISO 3166-1 alpha-2 country code. + + Returns: + List[Dict]: Simplified show information. + """ + if not sp: + sp = get_spotify_client() + + MAX_IDS_PER_CALL = 50 + shows: List[Dict] = [] + try: + for i in range(0, len(show_ids), MAX_IDS_PER_CALL): + chunk = show_ids[i:i + MAX_IDS_PER_CALL] + response = sp.shows(shows=chunk, market=market) + shows.extend(response.get("shows", [])) + return process_shows(shows) + except Exception as e: + print(f"Error fetching shows: {e}") + return [ # Return minimal error stub list to match signature + {"error": str(e)} + ] + + + + +def get_show_episodes( + show_id: str, + sp: Optional[Spotify] = None, + limit: int = 20, + offset: int = 0, + market: str = "US" +) -> List[Dict]: + """ + Get Spotify catalog information about a show's episodes. + + Parameters: + show_id (str): Spotify show ID. + sp (Spotify, optional): Spotipy client. If None, uses client-credentials. + limit (int): Number of episodes per request (1–50). + offset (int): Pagination offset. + market (str): ISO 3166-1 alpha-2 country code. + + Returns: + List[Dict]: Simplified episode information. + """ + try: + if not sp: + sp = get_spotify_client() + + response = sp.show_episodes( + show_id=show_id, + limit=limit, + offset=offset, + market=market + ) + episodes = response.get("items", []) + return process_episodes(episodes) + except Exception as e: + print(f"Error fetching show episodes: {e}") + return [{"error": str(e)}] + + + + + + + +def get_current_user_saved_shows( + sp: Optional[Spotify] = None, + limit: int = 20, + offset: int = 0 +) -> List[Dict]: + """ + Get shows saved in the current Spotify user's library. + + Parameters: + sp (Spotify, optional): Authenticated client with 'user-library-read' scope. + limit (int): Number of shows per request (1–50). + offset (int): Pagination offset. + + Returns: + List[Dict]: Simplified saved-show information. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + response = sp.current_user_saved_shows(limit=limit, offset=offset) + return process_saved_shows(response.get("items")) + except Exception as e: + print(f"Error getting saved shows: {e}") + return [{"error": str(e)}] + + +def save_shows_to_user_library( + show_ids: List[str], + sp: Optional[Spotify] = None +) -> str: + """ + Save one or more shows to the current user's library. + + Parameters: + show_ids (List[str]): Spotify show IDs/URIs/URLs (max 50 per call). + sp (Spotify, optional): Authenticated client with 'user-library-modify' scope. + + Returns: + str: "Success" or error message. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + # Spotify limits: up to 50 show IDs per request + MAX_IDS_PER_CALL = 50 + for i in range(0, len(show_ids), MAX_IDS_PER_CALL): + batch = show_ids[i:i + MAX_IDS_PER_CALL] + sp.current_user_saved_shows_add(shows=batch) + return "Success" + except Exception as e: + print(f"Error saving shows to library: {e}") + return f"error: {str(e)}" + + +def remove_shows_from_user_library( + show_ids: List[str], + sp: Optional[Spotify] = None +) -> str: + """ + Remove one or more shows from the current user's library. + + Parameters: + show_ids (List[str]): Spotify show IDs (max 50 per call). + sp (Spotify, optional): Authenticated client with 'user-library-modify' scope. + + Returns: + str: "Success" or error message. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_IDS_PER_CALL = 50 + for i in range(0, len(show_ids), MAX_IDS_PER_CALL): + chunk = show_ids[i:i + MAX_IDS_PER_CALL] + sp.current_user_saved_shows_delete(shows=chunk) + return "Success" + except Exception as e: + print(f"Error removing shows from library: {e}") + return f"error: {str(e)}" + + +def check_user_saved_shows( + show_ids: List[str], + sp: Optional[Spotify] = None +) -> List[bool]: + """ + Check if one or more shows are saved in the current user's library. + + Parameters: + show_ids (List[str]): Spotify show IDs to check (max 50 per call). + sp (Spotify, optional): Authenticated client with 'user-library-read' scope. + + Returns: + List[bool]: Saved status for each show ID (in input order). + """ + if not sp: + sp, _ = get_user_spotify_client() + + results: List[bool] = [] + MAX_IDS_PER_CALL = 50 + for i in range(0, len(show_ids), MAX_IDS_PER_CALL): + chunk = show_ids[i:i + MAX_IDS_PER_CALL] + res = sp.current_user_saved_shows_contains(shows=chunk) + results.extend(res) + return results diff --git a/mcp_servers/spotify/tools/tracks.py b/mcp_servers/spotify/tools/tracks.py new file mode 100644 index 00000000..aa30c52e --- /dev/null +++ b/mcp_servers/spotify/tools/tracks.py @@ -0,0 +1,162 @@ +from typing import List, Dict, Optional, Union +from spotipy import Spotify +from .base import get_spotify_client, get_user_spotify_client , process_track_info + + + + + +def get_tracks_info( + track_ids: List[str], + sp: Optional[Spotify] = None +) -> Union[List[Dict], Dict]: + """ + Get detailed information about one or multiple tracks. + + Parameters: + track_ids (List[str]): Spotify track IDs (max 50 per call). + sp (Spotify, optional): Spotipy client. If None, uses client-credentials. + + Returns: + list[dict] | dict: List of simplified tracks, or error dict. + """ + try: + if not sp: + sp = get_spotify_client() + + # Spotipy's tracks() accepts up to 50 IDs + results = sp.tracks(tracks=track_ids) + tracks = results.get("tracks", []) + return [ + t for t in (process_track_info(track) for track in tracks if track is not None) + if t is not None + ] + except Exception as e: + print(f"An error occurred while getting track info: {e}") + return {"error": str(e)} + + +def get_user_saved_tracks( + sp: Optional[Spotify] = None, + limit: int = 20, + offset: int = 0 +) -> Union[List[Dict], Dict]: + """ + Fetch the current user's saved tracks (Liked Songs). + + Parameters: + sp (Spotify, optional): Authenticated client with 'user-library-read' scope. + limit (int): Number of tracks per request (max 50). + offset (int): Pagination offset. + + Returns: + list[dict] | dict: Simplified saved tracks (with 'added_at'), or error dict. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + response = sp.current_user_saved_tracks(limit=limit, offset=offset) + items = response.get("items", []) + saved_tracks: List[Dict] = [] + + for item in items: + track = item.get("track") + if not track: + continue + track_info = process_track_info(track) + track_info["added_at"] = item.get("added_at") # Timestamp when saved + saved_tracks.append(track_info) + + return saved_tracks + except Exception as e: + print(f"An error occurred while fetching user saved tracks: {e}") + return {"error": str(e)} + + +def save_tracks_for_current_user( + track_ids: List[str], + sp: Optional[Spotify] = None +) -> str: + """ + Save one or more tracks to the current user's library. + + Parameters: + track_ids (List[str]): Spotify track IDs to save. + sp (Spotify, optional): Authenticated client with 'user-library-modify' scope. + + Returns: + str: "Success" or error message. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_IDS_PER_CALL = 50 # Spotify API limit + for i in range(0, len(track_ids), MAX_IDS_PER_CALL): + chunk = track_ids[i:i + MAX_IDS_PER_CALL] + sp.current_user_saved_tracks_add(tracks=chunk) + + return "Success" + except Exception as e: + print(f"An error occurred while saving tracks: {e}") + return f"error: {str(e)}" + + +def check_user_saved_tracks( + track_ids: List[str], + sp: Optional[Spotify] = None +) -> Union[List[bool], Dict]: + """ + Check if tracks are saved in the current user's library. + + Parameters: + track_ids (List[str]): Spotify track IDs to check (max 50 per call). + sp (Spotify, optional): Authenticated client with 'user-library-read' scope. + + Returns: + list[bool] | dict: True/False for each track ID (input order), or error dict. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_IDS_PER_CALL = 50 + saved_statuses: List[bool] = [] + for i in range(0, len(track_ids), MAX_IDS_PER_CALL): + chunk = track_ids[i:i + MAX_IDS_PER_CALL] + result = sp.current_user_saved_tracks_contains(tracks=chunk) + saved_statuses.extend(result) + return saved_statuses + except Exception as e: + print(f"An error occurred while checking saved tracks: {e}") + return {"error": str(e)} + + +def remove_user_saved_tracks( + track_ids: List[str], + sp: Optional[Spotify] = None +) -> str: + """ + Remove one or more tracks from the current user's saved tracks. + + Parameters: + track_ids (List[str]): Spotify track IDs to remove. + sp (Spotify, optional): Authenticated client with 'user-library-modify' scope. + + Returns: + str: "Success" or error message. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_IDS_PER_CALL = 50 # Spotify API limit + for i in range(0, len(track_ids), MAX_IDS_PER_CALL): + chunk = track_ids[i:i + MAX_IDS_PER_CALL] + sp.current_user_saved_tracks_delete(tracks=chunk) + + return "Success" + except Exception as e: + print(f"An error occurred while removing saved tracks: {e}") + return f"error: {str(e)}" diff --git a/mcp_servers/spotify/tools/users.py b/mcp_servers/spotify/tools/users.py new file mode 100644 index 00000000..69b09e46 --- /dev/null +++ b/mcp_servers/spotify/tools/users.py @@ -0,0 +1,273 @@ +from typing import List, Dict, Union, Optional +from spotipy import Spotify +from .base import get_user_spotify_client, get_spotify_client , process_top_artist_info + + +def get_current_user_profile(sp: Optional[Spotify] = None) -> Dict: + """ + Get the current Spotify user's profile details. + + Parameters: + sp (Spotify, optional): Authenticated Spotipy client. If None, a new one is created. + + Returns: + dict: Detailed user profile info as returned by Spotify, or {"error": ...} on failure. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + return sp.current_user() + except Exception as e: + print(f"Error fetching user profile: {e}") + return {"error": str(e)} + + + + + +def process_top_track_info(track: Dict) -> Dict: + """Extract relevant info from a Spotify track object.""" + return { + "id": track.get("id"), + "name": track.get("name"), + "album": { + "id": track.get("album", {}).get("id"), + "name": track.get("album", {}).get("name"), + "images": track.get("album", {}).get("images", []), + }, + "artists": [{"id": a.get("id"), "name": a.get("name")} for a in track.get("artists", [])], + "popularity": track.get("popularity"), + "duration_ms": track.get("duration_ms"), + "explicit": track.get("explicit"), + "external_url": track.get("external_urls", {}).get("spotify"), + "uri": track.get("uri"), + "type": track.get("type"), + } + + +def get_current_user_top_items( + sp: Optional[Spotify] = None, + item_type: str = "artists", # or "tracks" + time_range: str = "medium_term", + limit: int = 20, + offset: int = 0, +) -> List[Dict]: + """ + Return the current user's top artists or tracks (processed). + + Parameters: + sp (Spotify, optional): Authenticated client with 'user-top-read' scope. + item_type (str): "artists" or "tracks". + time_range (str): "short_term" | "medium_term" | "long_term". + limit (int): Items per page (1–50). + offset (int): Pagination offset. + + Returns: + List[Dict]: Processed items. + """ + if not sp: + sp, _ = get_user_spotify_client() + + if item_type == "artists": + results = sp.current_user_top_artists(time_range=time_range, limit=limit, offset=offset) + return [process_top_artist_info(a) for a in results.get("items", [])] + if item_type == "tracks": + results = sp.current_user_top_tracks(time_range=time_range, limit=limit, offset=offset) + return [process_top_track_info(t) for t in results.get("items", [])] + raise ValueError(f"item_type must be 'artists' or 'tracks', got {item_type}") + + +def get_spotify_user_public_profile(user_id: str, sp: Optional[Spotify] = None) -> Dict: + """ + Get public profile information for a Spotify user by user ID. + + Parameters: + user_id (str): Spotify user ID. + sp (Spotify, optional): Spotipy client. If None, client-credentials is used. + + Returns: + dict: Public profile information or {"error": ...} on failure. + """ + try: + if not sp: + sp = get_spotify_client() + return sp.user(user_id) + except Exception as e: + print(f"Error fetching public profile for user {user_id}: {e}") + return {"error": str(e)} + + +def follow_playlist( + playlist_id: str, + public: bool = True, + sp: Optional[Spotify] = None, +) -> str: + """ + Follow a playlist as the current authenticated user. + + Parameters: + playlist_id (str): Spotify playlist ID. + public (bool): Add as public follower if True; else private. + sp (Spotify, optional): Authenticated client. + + Returns: + str: "Success" or error message. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + sp.current_user_follow_playlist(playlist_id, public=public) + return "Success" + except Exception as e: + print(f"Error following playlist: {e}") + return f"error: {str(e)}" + + +def unfollow_playlist( + playlist_id: str, + sp: Optional[Spotify] = None, +) -> str: + """ + Unfollow a playlist as the current authenticated user. + + Parameters: + playlist_id (str): Spotify playlist ID. + sp (Spotify, optional): Authenticated client. + + Returns: + str: "Success" or error message. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + sp.current_user_unfollow_playlist(playlist_id) + return "Success" + except Exception as e: + print(f"Error unfollowing playlist: {e}") + return f"error: {str(e)}" + + +def get_current_user_followed_artists( + sp: Optional[Spotify] = None, + limit: int = 20, + after: Optional[str] = None, +) -> Union[List[Dict], Dict]: + """ + Retrieve artists followed by the current user. + + Parameters: + sp (Spotify, optional): Authenticated client with 'user-follow-read' scope. + limit (int): Max artists per request (1–50). + after (str, optional): Last artist ID from previous page (for pagination). + + Returns: + list[dict] | dict: Artist objects (from Spotify) or error dict. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + results = sp.current_user_followed_artists(limit=limit, after=after) + return results.get("artists", {}).get("items", []) + except Exception as e: + print(f"Error retrieving followed artists: {e}") + return {"error": str(e)} + + +def follow_artists_or_users( + ids: List[str], + sp: Optional[Spotify] = None, + type_: str = "artist", # "artist" or "user" +) -> str: + """ + Follow one or more artists or Spotify users. + + Parameters: + ids (List[str]): Spotify IDs to follow (max 50 per call). + sp (Spotify, optional): Authenticated client. + type_ (str): "artist" or "user". + + Returns: + str: "Success" or error message. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_IDS_PER_CALL = 50 + for i in range(0, len(ids), MAX_IDS_PER_CALL): + chunk = ids[i:i + MAX_IDS_PER_CALL] + if type_ == "artist": + sp.user_follow_artists(chunk) + elif type_ == "user": + sp.user_follow_users(chunk) + else: + raise ValueError("type_ must be 'artist' or 'user'.") + return "Success" + except Exception as e: + print(f"An error occurred while following artists/users: {e}") + return f"error: {str(e)}" + + +def unfollow_artists_or_users( + ids: List[str], + type_: str = "artist", # "artist" or "user" + sp: Optional[Spotify] = None, +) -> str: + """ + Unfollow one or more artists or Spotify users. + + Parameters: + ids (List[str]): Spotify IDs to unfollow (max 50 per call). + type_ (str): "artist" or "user". + sp (Spotify, optional): Authenticated client with 'user-follow-modify' scope. + + Returns: + str: "Success" or error message. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + MAX_IDS_PER_CALL = 50 + for i in range(0, len(ids), MAX_IDS_PER_CALL): + chunk = ids[i:i + MAX_IDS_PER_CALL] + if type_ == "artist": + sp.user_unfollow_artists(chunk) + elif type_ == "user": + sp.user_unfollow_users(chunk) + else: + raise ValueError("type_ must be 'artist' or 'user'.") + return "Success" + except Exception as e: + print(f"An error occurred while unfollowing artists/users: {e}") + return f"error: {str(e)}" + + +def check_user_follows( + ids: List[str], + follow_type: str = "artist", # "artist" or "user" + sp: Optional[Spotify] = None, +) -> Union[List[bool], Dict]: + """ + Check if the current user follows given artists or users. + + Parameters: + ids (List[str]): Spotify IDs to check (max 50). + follow_type (str): "artist" or "user". + sp (Spotify, optional): Authenticated client with 'user-follow-read' scope. + + Returns: + list[bool] | dict: Per-ID boolean status, or error dict. + """ + try: + if not sp: + sp, _ = get_user_spotify_client() + + if follow_type == "artist": + return sp.current_user_following_artists(ids) + if follow_type == "user": + return sp.current_user_following_users(ids) + raise ValueError('follow_type must be "artist" or "user"') + except Exception as e: + print(f"An error occurred: {e}") + return {"error": str(e)} diff --git a/mcp_servers/supabase/.eslintrc.json b/mcp_servers/supabase/.eslintrc.json new file mode 100644 index 00000000..390256c5 --- /dev/null +++ b/mcp_servers/supabase/.eslintrc.json @@ -0,0 +1,13 @@ +{ + "root": false, + "extends": [ + "../.eslintrc.js" + ], + "parserOptions": { + "tsconfigRootDir": ".", + "project": "./tsconfig.json" + }, + "rules": { + // Package-specific rules can go here + } +} \ No newline at end of file diff --git a/mcp_servers/supabase/Dockerfile b/mcp_servers/supabase/Dockerfile index 0349a431..83423c2c 100644 --- a/mcp_servers/supabase/Dockerfile +++ b/mcp_servers/supabase/Dockerfile @@ -1,12 +1,19 @@ FROM node:22.12-alpine AS builder -COPY mcp_servers/supabase /app - +# Set the working directory inside the container WORKDIR /app -RUN --mount=type=cache,target=/root/.npm npm install +# Copy package.json and package-lock.json to install dependencies +COPY mcp_servers/supabase/package.json mcp_servers/supabase/package-lock.json ./ + +# Install dependencies (ignoring scripts to prevent running the prepare script) +RUN npm install --ignore-scripts + +# Copy the rest of the application source code +COPY mcp_servers/supabase . -RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev +# Build the application using TypeScript +RUN npm run build FROM node:22-alpine AS release @@ -22,4 +29,4 @@ WORKDIR /app RUN npm ci --ignore-scripts --omit-dev -ENTRYPOINT ["node", "dist/sse.js"] \ No newline at end of file +ENTRYPOINT ["node", "dist/mcp.cjs"] \ No newline at end of file diff --git a/mcp_servers/supabase/README.md b/mcp_servers/supabase/README.md index 67eb19e9..79a702e4 100644 --- a/mcp_servers/supabase/README.md +++ b/mcp_servers/supabase/README.md @@ -1,98 +1,81 @@ -# Supabase MCP Server (Klavis Fork) +# Supabase MCP Server -This directory contains a Model Context Protocol (MCP) server designed to allow LLM to interact with your Supabase projects. It is based on the official [supabase-community/supabase-mcp](https://github.com/supabase-community/supabase-mcp) server but may include modifications specific to the Klavis project. +A Model Context Protocol (MCP) server for Supabase integration. Manage database operations, authentication, and real-time subscriptions using Supabase's API. -This server acts as a bridge, exposing Supabase functionalities (like database querying, project management, log fetching) as tools that MCP-compatible AI clients can utilize. +## šŸš€ Quick Start - Run in 30 Seconds -## License +### 🌐 Using Hosted Service (Recommended for Production) -This specific implementation is licensed under the MIT License. +Get instant access to Supabase with our managed infrastructure - **no setup required**: -## Prerequisites +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** -* **Node.js:** Version 22.x or higher is recommended (check with `node -v`). Download from [nodejs.org](https://nodejs.org/). -* **Docker:** Required for the recommended setup method. Download from [docker.com](https://www.docker.com/). -* **Supabase Account:** You need a Supabase account. -* **Supabase Personal Access Token (PAT):** - 1. Go to your Supabase Account Settings -> Access Tokens. - 2. Generate a new token. Give it a descriptive name (e.g., "Klavis MCP Server"). - 3. **Important:** Copy the token immediately. You won't be able to see it again. +```bash +pip install klavis +# or +npm install klavis +``` -## Setup +```python +from klavis import Klavis -1. **Environment Variables:** - * Navigate to the `mcp_servers/supabase` directory in your terminal. - * Copy the example environment file: `cp .env.example .env` - * Edit the newly created `.env` file and add your Supabase Personal Access Token: - ```env - SUPABASE_AUTH_TOKEN=your_supabase_personal_access_token_here - # Add any other required environment variables if needed - ``` +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("SUPABASE", "user123") +``` -## Running the Server Locally +### 🐳 Using Docker (For Self-Hosting) -You can run the server using Docker (recommended) or directly with Node.js. +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/supabase-mcp-server:latest -### Method 1: Docker (Recommended) -This method isolates the server environment and simplifies dependency management. +# Run Supabase MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/supabase-mcp-server:latest -1. **Build the Docker Image:** - * Make sure you are in the root directory of the `klavis` project (the parent directory of `mcp_servers`). - * Run the build command: - ```bash - docker build -t klavis-supabase-mcp -f mcp_servers/supabase/Dockerfile . - ``` - * This command builds an image named `klavis-supabase-mcp` using the specific `Dockerfile` for the Supabase MCP server. The `.` indicates the build context is the current directory (`klavis`). +# Run Supabase MCP Server (no OAuth support) +docker run -p 5000:5000 \ + -e SUPABASE_URL=your_supabase_url \ + -e AUTH_DATA='{"access_token":"your_supabase_anon_key_here"}' \ + ghcr.io/klavis-ai/supabase-mcp-server:latest +``` -2. **Run the Docker Container:** - * Execute the following command: - ```bash - docker run --rm -p 5000:5000 --env-file mcp_servers/supabase/.env klavis-supabase-mcp - ``` - * `--rm`: Automatically removes the container when it exits. - * `-p 5000:5000`: Maps port 5000 on your host machine to port 5000 inside the container (the port the server listens on). - * `--env-file mcp_servers/supabase/.env`: Loads the environment variables (including your `SUPABASE_AUTH_TOKEN`) from the specified `.env` file into the container. - * `klavis-supabase-mcp`: The name of the image to run. +**OAuth Setup:** Supabase requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. -The server should now be running and accessible at `http://localhost:5000`. +**Manual Setup:** Alternatively, provide your Supabase URL and anon key directly. -### Method 2: Node.js (Alternative) +## šŸ› ļø Available Tools -This method runs the server directly on your machine using your local Node.js installation. +- **Database Operations**: Query, insert, update, and delete data +- **Authentication**: User management and authentication flows +- **Real-time**: Subscribe to database changes and real-time updates +- **Storage**: File upload and storage management +- **Functions**: Invoke Supabase Edge Functions -1. **Navigate to the Directory:** - ```bash - cd mcp_servers/supabase - ``` +## šŸ“š Documentation & Support -2. **Install Dependencies:** - * Make sure you have created and configured the `.env` file in this directory as described in the Setup section. - ```bash - npm install - ``` +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | -3. **Build the Server Code:** - * This compiles the TypeScript source code into JavaScript. - ```bash - npm run build - ``` +## šŸ¤ Contributing -4. **Run the Server:** - ```bash - node dist/sse.js - ``` +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. -The server should now be running and accessible at `http://localhost:5000`. +## šŸ“œ License -## Configuring Your MCP Client (e.g., Cursor) +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. -To allow your AI assistant to use this server, you need to configure it in the client's settings. MCP clients usually expect a URL where the server's Server-Sent Events (SSE) endpoint is available. +--- -For this server running locally, the endpoint is: - -`http://localhost:5000/sse` - -Consult your specific MCP client's documentation on how to add a custom MCP server. You will typically need to provide this URL. Unlike the original `npx` approach from the community repository, this server runs independently, and the client connects directly to its URL. - -*Note:* Ensure the `SUPABASE_AUTH_TOKEN` is correctly set in the `.env` file used by the server (either via `--env-file` for Docker or by being present in the directory for the Node.js method). The server uses this token to authenticate with Supabase on your behalf. +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/supabase/package-lock.json b/mcp_servers/supabase/package-lock.json index a4716438..e832a920 100644 --- a/mcp_servers/supabase/package-lock.json +++ b/mcp_servers/supabase/package-lock.json @@ -1,15 +1,15 @@ { - "name": "@supabase/mcp-server-supabase", + "name": "@klavis-ai/mcp-server-supabase", "version": "0.3.5", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "@supabase/mcp-server-supabase", + "name": "@klavis-ai/mcp-server-supabase", "version": "0.3.5", "license": "Apache-2.0", "dependencies": { - "@modelcontextprotocol/sdk": "^1.4.1", + "@modelcontextprotocol/sdk": "^1.12.1", "@supabase/mcp-utils": "0.1.3", "common-tags": "^1.8.2", "express": "^5.1.0", @@ -17,7 +17,7 @@ "zod": "^3.24.1" }, "bin": { - "mcp-server-supabase": "dist/sse.js" + "supabase-mcp": "dist/mcp.js" }, "devDependencies": { "@ai-sdk/anthropic": "^1.2.9", @@ -26,7 +26,7 @@ "@types/common-tags": "^1.8.4", "@types/express": "^5.0.0", "@types/node": "^22.8.6", - "ai": "^4.3.4", + "ai": "^5.0.76", "date-fns": "^4.1.0", "dotenv": "^16.5.0", "msw": "^2.7.3", @@ -37,7 +37,7 @@ "tsup": "^8.3.5", "tsx": "^4.19.2", "typescript": "^5.6.3", - "vitest": "^2.1.9" + "vitest": "^3.2.2" } }, "node_modules/@ai-sdk/anthropic": { @@ -57,6 +57,55 @@ "zod": "^3.0.0" } }, + "node_modules/@ai-sdk/gateway": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/gateway/-/gateway-2.0.0.tgz", + "integrity": "sha512-Gj0PuawK7NkZuyYgO/h5kDK/l6hFOjhLdTq3/Lli1FTl47iGmwhH1IZQpAL3Z09BeFYWakcwUmn02ovIm2wy9g==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.12", + "@vercel/oidc": "3.0.3" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/@ai-sdk/gateway/node_modules/@ai-sdk/provider": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/gateway/node_modules/@ai-sdk/provider-utils": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.12.tgz", + "integrity": "sha512-ZtbdvYxdMoria+2SlNarEk6Hlgyf+zzcznlD55EAl+7VZvJaSg2sqPvwArY7L6TfDEDJsnCq0fdhBSkYo0Xqdg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@standard-schema/spec": "^1.0.0", + "eventsource-parser": "^3.0.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, "node_modules/@ai-sdk/provider": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.1.2.tgz", @@ -107,49 +156,6 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, - "node_modules/@ai-sdk/react": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@ai-sdk/react/-/react-1.2.8.tgz", - "integrity": "sha512-S2FzCSi4uTF0JuSN6zYMXyiAWVAzi/Hho8ISYgHpGZiICYLNCP2si4DuXQOsnWef3IXzQPLVoE11C63lILZIkw==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider-utils": "2.2.6", - "@ai-sdk/ui-utils": "1.2.7", - "swr": "^2.2.5", - "throttleit": "2.1.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "react": "^18 || ^19 || ^19.0.0-rc", - "zod": "^3.23.8" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } - } - }, - "node_modules/@ai-sdk/ui-utils": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/@ai-sdk/ui-utils/-/ui-utils-1.2.7.tgz", - "integrity": "sha512-OVRxa4SDj0wVsMZ8tGr/whT89oqNtNoXBKmqWC2BRv5ZG6azL2LYZ5ZK35u3lb4l1IE7cWGsLlmq0py0ttsL7A==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "1.1.2", - "@ai-sdk/provider-utils": "2.2.6", - "zod-to-json-schema": "^3.24.1" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.23.8" - } - }, "node_modules/@babel/code-frame": { "version": "7.26.2", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", @@ -819,14 +825,15 @@ } }, "node_modules/@modelcontextprotocol/sdk": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.9.0.tgz", - "integrity": "sha512-Jq2EUCQpe0iyO5FGpzVYDNFR6oR53AIrwph9yWl7uSc7IWUMsrmpmSaTGra5hQNunXpM+9oit85p924jWuHzUA==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.12.1.tgz", + "integrity": "sha512-KG1CZhZfWg+u8pxeM/mByJDScJSrjjxLc8fwQqbsS8xCjBmQfMNEBTotYdNanKekepnfRI85GtgQlctLFpcYPw==", "license": "MIT", "dependencies": { + "ajv": "^6.12.6", "content-type": "^1.0.5", "cors": "^2.8.5", - "cross-spawn": "^7.0.3", + "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", @@ -950,9 +957,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.39.0.tgz", - "integrity": "sha512-lGVys55Qb00Wvh8DMAocp5kIcaNzEFTmGhfFd88LfaogYTRKrdxgtlO5H6S49v2Nd8R2C6wLOal0qv6/kCkOwA==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.50.1.tgz", + "integrity": "sha512-HJXwzoZN4eYTdD8bVV22DN8gsPCAj3V20NHKOs8ezfXanGpmVPR7kalUHd+Y31IJp9stdB87VKPFbsGY3H/2ag==", "cpu": [ "arm" ], @@ -964,9 +971,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.39.0.tgz", - "integrity": "sha512-It9+M1zE31KWfqh/0cJLrrsCPiF72PoJjIChLX+rEcujVRCb4NLQ5QzFkzIZW8Kn8FTbvGQBY5TkKBau3S8cCQ==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.50.1.tgz", + "integrity": "sha512-PZlsJVcjHfcH53mOImyt3bc97Ep3FJDXRpk9sMdGX0qgLmY0EIWxCag6EigerGhLVuL8lDVYNnSo8qnTElO4xw==", "cpu": [ "arm64" ], @@ -978,9 +985,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.39.0.tgz", - "integrity": "sha512-lXQnhpFDOKDXiGxsU9/l8UEGGM65comrQuZ+lDcGUx+9YQ9dKpF3rSEGepyeR5AHZ0b5RgiligsBhWZfSSQh8Q==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.50.1.tgz", + "integrity": "sha512-xc6i2AuWh++oGi4ylOFPmzJOEeAa2lJeGUGb4MudOtgfyyjr4UPNK+eEWTPLvmPJIY/pgw6ssFIox23SyrkkJw==", "cpu": [ "arm64" ], @@ -992,9 +999,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.39.0.tgz", - "integrity": "sha512-mKXpNZLvtEbgu6WCkNij7CGycdw9cJi2k9v0noMb++Vab12GZjFgUXD69ilAbBh034Zwn95c2PNSz9xM7KYEAQ==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.50.1.tgz", + "integrity": "sha512-2ofU89lEpDYhdLAbRdeyz/kX3Y2lpYc6ShRnDjY35bZhd2ipuDMDi6ZTQ9NIag94K28nFMofdnKeHR7BT0CATw==", "cpu": [ "x64" ], @@ -1006,9 +1013,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.39.0.tgz", - "integrity": "sha512-jivRRlh2Lod/KvDZx2zUR+I4iBfHcu2V/BA2vasUtdtTN2Uk3jfcZczLa81ESHZHPHy4ih3T/W5rPFZ/hX7RtQ==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.50.1.tgz", + "integrity": "sha512-wOsE6H2u6PxsHY/BeFHA4VGQN3KUJFZp7QJBmDYI983fgxq5Th8FDkVuERb2l9vDMs1D5XhOrhBrnqcEY6l8ZA==", "cpu": [ "arm64" ], @@ -1020,9 +1027,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.39.0.tgz", - "integrity": "sha512-8RXIWvYIRK9nO+bhVz8DwLBepcptw633gv/QT4015CpJ0Ht8punmoHU/DuEd3iw9Hr8UwUV+t+VNNuZIWYeY7Q==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.50.1.tgz", + "integrity": "sha512-A/xeqaHTlKbQggxCqispFAcNjycpUEHP52mwMQZUNqDUJFFYtPHCXS1VAG29uMlDzIVr+i00tSFWFLivMcoIBQ==", "cpu": [ "x64" ], @@ -1034,9 +1041,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.39.0.tgz", - "integrity": "sha512-mz5POx5Zu58f2xAG5RaRRhp3IZDK7zXGk5sdEDj4o96HeaXhlUwmLFzNlc4hCQi5sGdR12VDgEUqVSHer0lI9g==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.50.1.tgz", + "integrity": "sha512-54v4okehwl5TaSIkpp97rAHGp7t3ghinRd/vyC1iXqXMfjYUTm7TfYmCzXDoHUPTTf36L8pr0E7YsD3CfB3ZDg==", "cpu": [ "arm" ], @@ -1048,9 +1055,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.39.0.tgz", - "integrity": "sha512-+YDwhM6gUAyakl0CD+bMFpdmwIoRDzZYaTWV3SDRBGkMU/VpIBYXXEvkEcTagw/7VVkL2vA29zU4UVy1mP0/Yw==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.50.1.tgz", + "integrity": "sha512-p/LaFyajPN/0PUHjv8TNyxLiA7RwmDoVY3flXHPSzqrGcIp/c2FjwPPP5++u87DGHtw+5kSH5bCJz0mvXngYxw==", "cpu": [ "arm" ], @@ -1062,9 +1069,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.39.0.tgz", - "integrity": "sha512-EKf7iF7aK36eEChvlgxGnk7pdJfzfQbNvGV/+l98iiMwU23MwvmV0Ty3pJ0p5WQfm3JRHOytSIqD9LB7Bq7xdQ==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.50.1.tgz", + "integrity": "sha512-2AbMhFFkTo6Ptna1zO7kAXXDLi7H9fGTbVaIq2AAYO7yzcAsuTNWPHhb2aTA6GPiP+JXh85Y8CiS54iZoj4opw==", "cpu": [ "arm64" ], @@ -1076,9 +1083,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.39.0.tgz", - "integrity": "sha512-vYanR6MtqC7Z2SNr8gzVnzUul09Wi1kZqJaek3KcIlI/wq5Xtq4ZPIZ0Mr/st/sv/NnaPwy/D4yXg5x0B3aUUA==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.50.1.tgz", + "integrity": "sha512-Cgef+5aZwuvesQNw9eX7g19FfKX5/pQRIyhoXLCiBOrWopjo7ycfB292TX9MDcDijiuIJlx1IzJz3IoCPfqs9w==", "cpu": [ "arm64" ], @@ -1090,9 +1097,9 @@ ] }, "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.39.0.tgz", - "integrity": "sha512-NMRUT40+h0FBa5fb+cpxtZoGAggRem16ocVKIv5gDB5uLDgBIwrIsXlGqYbLwW8YyO3WVTk1FkFDjMETYlDqiw==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.50.1.tgz", + "integrity": "sha512-RPhTwWMzpYYrHrJAS7CmpdtHNKtt2Ueo+BlLBjfZEhYBhK00OsEqM08/7f+eohiF6poe0YRDDd8nAvwtE/Y62Q==", "cpu": [ "loong64" ], @@ -1103,10 +1110,10 @@ "linux" ] }, - "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.39.0.tgz", - "integrity": "sha512-0pCNnmxgduJ3YRt+D+kJ6Ai/r+TaePu9ZLENl+ZDV/CdVczXl95CbIiwwswu4L+K7uOIGf6tMo2vm8uadRaICQ==", + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.50.1.tgz", + "integrity": "sha512-eSGMVQw9iekut62O7eBdbiccRguuDgiPMsw++BVUg+1K7WjZXHOg/YOT9SWMzPZA+w98G+Fa1VqJgHZOHHnY0Q==", "cpu": [ "ppc64" ], @@ -1118,9 +1125,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.39.0.tgz", - "integrity": "sha512-t7j5Zhr7S4bBtksT73bO6c3Qa2AV/HqiGlj9+KB3gNF5upcVkx+HLgxTm8DK4OkzsOYqbdqbLKwvGMhylJCPhQ==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.50.1.tgz", + "integrity": "sha512-S208ojx8a4ciIPrLgazF6AgdcNJzQE4+S9rsmOmDJkusvctii+ZvEuIC4v/xFqzbuP8yDjn73oBlNDgF6YGSXQ==", "cpu": [ "riscv64" ], @@ -1132,9 +1139,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.39.0.tgz", - "integrity": "sha512-m6cwI86IvQ7M93MQ2RF5SP8tUjD39Y7rjb1qjHgYh28uAPVU8+k/xYWvxRO3/tBN2pZkSMa5RjnPuUIbrwVxeA==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.50.1.tgz", + "integrity": "sha512-3Ag8Ls1ggqkGUvSZWYcdgFwriy2lWo+0QlYgEFra/5JGtAd6C5Hw59oojx1DeqcA2Wds2ayRgvJ4qxVTzCHgzg==", "cpu": [ "riscv64" ], @@ -1146,9 +1153,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.39.0.tgz", - "integrity": "sha512-iRDJd2ebMunnk2rsSBYlsptCyuINvxUfGwOUldjv5M4tpa93K8tFMeYGpNk2+Nxl+OBJnBzy2/JCscGeO507kA==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.50.1.tgz", + "integrity": "sha512-t9YrKfaxCYe7l7ldFERE1BRg/4TATxIg+YieHQ966jwvo7ddHJxPj9cNFWLAzhkVsbBvNA4qTbPVNsZKBO4NSg==", "cpu": [ "s390x" ], @@ -1160,9 +1167,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.39.0.tgz", - "integrity": "sha512-t9jqYw27R6Lx0XKfEFe5vUeEJ5pF3SGIM6gTfONSMb7DuG6z6wfj2yjcoZxHg129veTqU7+wOhY6GX8wmf90dA==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.50.1.tgz", + "integrity": "sha512-MCgtFB2+SVNuQmmjHf+wfI4CMxy3Tk8XjA5Z//A0AKD7QXUYFMQcns91K6dEHBvZPCnhJSyDWLApk40Iq/H3tA==", "cpu": [ "x64" ], @@ -1174,9 +1181,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.39.0.tgz", - "integrity": "sha512-ThFdkrFDP55AIsIZDKSBWEt/JcWlCzydbZHinZ0F/r1h83qbGeenCt/G/wG2O0reuENDD2tawfAj2s8VK7Bugg==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.50.1.tgz", + "integrity": "sha512-nEvqG+0jeRmqaUMuwzlfMKwcIVffy/9KGbAGyoa26iu6eSngAYQ512bMXuqqPrlTyfqdlB9FVINs93j534UJrg==", "cpu": [ "x64" ], @@ -1187,10 +1194,24 @@ "linux" ] }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.50.1.tgz", + "integrity": "sha512-RDsLm+phmT3MJd9SNxA9MNuEAO/J2fhW8GXk62G/B4G7sLVumNFbRwDL6v5NrESb48k+QMqdGbHgEtfU0LCpbA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.39.0.tgz", - "integrity": "sha512-jDrLm6yUtbOg2TYB3sBF3acUnAwsIksEYjLeHL+TJv9jg+TmTwdyjnDex27jqEMakNKf3RwwPahDIt7QXCSqRQ==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.50.1.tgz", + "integrity": "sha512-hpZB/TImk2FlAFAIsoElM3tLzq57uxnGYwplg6WDyAxbYczSi8O2eQ+H2Lx74504rwKtZ3N2g4bCUkiamzS6TQ==", "cpu": [ "arm64" ], @@ -1202,9 +1223,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.39.0.tgz", - "integrity": "sha512-6w9uMuza+LbLCVoNKL5FSLE7yvYkq9laSd09bwS0tMjkwXrmib/4KmoJcrKhLWHvw19mwU+33ndC69T7weNNjQ==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.50.1.tgz", + "integrity": "sha512-SXjv8JlbzKM0fTJidX4eVsH+Wmnp0/WcD8gJxIZyR6Gay5Qcsmdbi9zVtnbkGPG8v2vMR1AD06lGWy5FLMcG7A==", "cpu": [ "ia32" ], @@ -1216,9 +1237,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.39.0.tgz", - "integrity": "sha512-yAkUOkIKZlK5dl7u6dg897doBgLXmUHhIINM2c+sND3DZwnrdQkkSiDh7N75Ll4mM4dxSkYfXqU9fW3lLkMFug==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.50.1.tgz", + "integrity": "sha512-StxAO/8ts62KZVRAm4JZYq9+NqNsV7RvimNK+YM7ry//zebEH6meuugqW/P5OFUCjyQgui+9fUxT6d5NShvMvA==", "cpu": [ "x64" ], @@ -1229,6 +1250,13 @@ "win32" ] }, + "node_modules/@standard-schema/spec": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", + "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", + "dev": true, + "license": "MIT" + }, "node_modules/@supabase/mcp-utils": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/@supabase/mcp-utils/-/mcp-utils-0.1.3.tgz", @@ -1258,6 +1286,16 @@ "@types/node": "*" } }, + "node_modules/@types/chai": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz", + "integrity": "sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*" + } + }, "node_modules/@types/common-tags": { "version": "1.8.4", "resolved": "https://registry.npmjs.org/@types/common-tags/-/common-tags-1.8.4.tgz", @@ -1282,17 +1320,17 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/diff-match-patch": { - "version": "1.0.36", - "resolved": "https://registry.npmjs.org/@types/diff-match-patch/-/diff-match-patch-1.0.36.tgz", - "integrity": "sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==", + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", "dev": true, "license": "MIT" }, "node_modules/@types/estree": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz", - "integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", "dev": true, "license": "MIT" }, @@ -1396,39 +1434,50 @@ "dev": true, "license": "MIT" }, + "node_modules/@vercel/oidc": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@vercel/oidc/-/oidc-3.0.3.tgz", + "integrity": "sha512-yNEQvPcVrK9sIe637+I0jD6leluPxzwJKx/Haw6F4H77CdDsszUn5V3o96LPziXkSNE2B83+Z3mjqGKBK/R6Gg==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">= 20" + } + }, "node_modules/@vitest/expect": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-2.1.9.tgz", - "integrity": "sha512-UJCIkTBenHeKT1TTlKMJWy1laZewsRIzYighyYiJKZreqtdxSos/S1t+ktRMQWu2CKqaarrkeszJx1cgC5tGZw==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.2.tgz", + "integrity": "sha512-ipHw0z669vEMjzz3xQE8nJX1s0rQIb7oEl4jjl35qWTwm/KIHERIg/p/zORrjAaZKXfsv7IybcNGHwhOOAPMwQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "2.1.9", - "@vitest/utils": "2.1.9", - "chai": "^5.1.2", - "tinyrainbow": "^1.2.0" + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.2", + "@vitest/utils": "3.2.2", + "chai": "^5.2.0", + "tinyrainbow": "^2.0.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/mocker": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-2.1.9.tgz", - "integrity": "sha512-tVL6uJgoUdi6icpxmdrn5YNo3g3Dxv+IHJBr0GXHaEdTcw3F+cPKnsXFhli6nO+f/6SDKPHEK1UN+k+TQv0Ehg==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.2.tgz", + "integrity": "sha512-jKojcaRyIYpDEf+s7/dD3LJt53c0dPfp5zCPXz9H/kcGrSlovU/t1yEaNzM9oFME3dcd4ULwRI/x0Po1Zf+LTw==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "2.1.9", + "@vitest/spy": "3.2.2", "estree-walker": "^3.0.3", - "magic-string": "^0.30.12" + "magic-string": "^0.30.17" }, "funding": { "url": "https://opencollective.com/vitest" }, "peerDependencies": { "msw": "^2.4.9", - "vite": "^5.0.0" + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "peerDependenciesMeta": { "msw": { @@ -1440,70 +1489,70 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.9.tgz", - "integrity": "sha512-KhRIdGV2U9HOUzxfiHmY8IFHTdqtOhIzCpd8WRdJiE7D/HUcZVD0EgQCVjm+Q9gkUXWgBvMmTtZgIG48wq7sOQ==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.2.tgz", + "integrity": "sha512-FY4o4U1UDhO9KMd2Wee5vumwcaHw7Vg4V7yR4Oq6uK34nhEJOmdRYrk3ClburPRUA09lXD/oXWZ8y/Sdma0aUQ==", "dev": true, "license": "MIT", "dependencies": { - "tinyrainbow": "^1.2.0" + "tinyrainbow": "^2.0.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/runner": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-2.1.9.tgz", - "integrity": "sha512-ZXSSqTFIrzduD63btIfEyOmNcBmQvgOVsPNPe0jYtESiXkhd8u2erDLnMxmGrDCwHCCHE7hxwRDCT3pt0esT4g==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.2.tgz", + "integrity": "sha512-GYcHcaS3ejGRZYed2GAkvsjBeXIEerDKdX3orQrBJqLRiea4NSS9qvn9Nxmuy1IwIB+EjFOaxXnX79l8HFaBwg==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/utils": "2.1.9", - "pathe": "^1.1.2" + "@vitest/utils": "3.2.2", + "pathe": "^2.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/snapshot": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-2.1.9.tgz", - "integrity": "sha512-oBO82rEjsxLNJincVhLhaxxZdEtV0EFHMK5Kmx5sJ6H9L183dHECjiefOAdnqpIgT5eZwT04PoggUnW88vOBNQ==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.2.tgz", + "integrity": "sha512-aMEI2XFlR1aNECbBs5C5IZopfi5Lb8QJZGGpzS8ZUHML5La5wCbrbhLOVSME68qwpT05ROEEOAZPRXFpxZV2wA==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "2.1.9", - "magic-string": "^0.30.12", - "pathe": "^1.1.2" + "@vitest/pretty-format": "3.2.2", + "magic-string": "^0.30.17", + "pathe": "^2.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/spy": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-2.1.9.tgz", - "integrity": "sha512-E1B35FwzXXTs9FHNK6bDszs7mtydNi5MIfUWpceJ8Xbfb1gBMscAnwLbEu+B44ed6W3XjL9/ehLPHR1fkf1KLQ==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.2.tgz", + "integrity": "sha512-6Utxlx3o7pcTxvp0u8kUiXtRFScMrUg28KjB3R2hon7w4YqOFAEA9QwzPVVS1QNL3smo4xRNOpNZClRVfpMcYg==", "dev": true, "license": "MIT", "dependencies": { - "tinyspy": "^3.0.2" + "tinyspy": "^4.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/utils": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.1.9.tgz", - "integrity": "sha512-v0psaMSkNJ3A2NMrUEHFRzJtDPFn+/VWZ5WxImB21T9fjucJRmS7xCS3ppEnARb9y11OAzaD+P2Ps+b+BGX5iQ==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.2.tgz", + "integrity": "sha512-qJYMllrWpF/OYfWHP32T31QCaLa3BAzT/n/8mNGhPdVcjY+JYazQFO1nsJvXU12Kp1xMpNY4AGuljPTNjQve6A==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "2.1.9", - "loupe": "^3.1.2", - "tinyrainbow": "^1.2.0" + "@vitest/pretty-format": "3.2.2", + "loupe": "^3.1.3", + "tinyrainbow": "^2.0.0" }, "funding": { "url": "https://opencollective.com/vitest" @@ -1533,32 +1582,77 @@ } }, "node_modules/ai": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/ai/-/ai-4.3.5.tgz", - "integrity": "sha512-hxJ+6YCdGOK1MVPGITmz1if+LXR/aW72w8TI8kiV+3R7lpK1hfpApR8EjqN2ag6cWa0R7OEI3gb/srWkQ3hT2Q==", + "version": "5.0.76", + "resolved": "https://registry.npmjs.org/ai/-/ai-5.0.76.tgz", + "integrity": "sha512-ZCxi1vrpyCUnDbtYrO/W8GLvyacV9689f00yshTIQ3mFFphbD7eIv40a2AOZBv3GGRA7SSRYIDnr56wcS/gyQg==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@ai-sdk/provider": "1.1.2", - "@ai-sdk/provider-utils": "2.2.6", - "@ai-sdk/react": "1.2.8", - "@ai-sdk/ui-utils": "1.2.7", - "@opentelemetry/api": "1.9.0", - "jsondiffpatch": "0.6.0" + "@ai-sdk/gateway": "2.0.0", + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.12", + "@opentelemetry/api": "1.9.0" }, "engines": { "node": ">=18" }, "peerDependencies": { - "react": "^18 || ^19 || ^19.0.0-rc", - "zod": "^3.23.8" + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/ai/node_modules/@ai-sdk/provider": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "json-schema": "^0.4.0" }, - "peerDependenciesMeta": { - "react": { - "optional": true - } + "engines": { + "node": ">=18" + } + }, + "node_modules/ai/node_modules/@ai-sdk/provider-utils": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.12.tgz", + "integrity": "sha512-ZtbdvYxdMoria+2SlNarEk6Hlgyf+zzcznlD55EAl+7VZvJaSg2sqPvwArY7L6TfDEDJsnCq0fdhBSkYo0Xqdg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@standard-schema/spec": "^1.0.0", + "eventsource-parser": "^3.0.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.25.76 || ^4.1.8" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" } }, + "node_modules/ajv/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, "node_modules/ansi-colors": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", @@ -1769,19 +1863,6 @@ "node": ">=12" } }, - "node_modules/chalk": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz", - "integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, "node_modules/change-case": { "version": "5.4.4", "resolved": "https://registry.npmjs.org/change-case/-/change-case-5.4.4.tgz", @@ -2037,9 +2118,9 @@ } }, "node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "license": "MIT", "dependencies": { "ms": "^2.1.3" @@ -2072,23 +2153,6 @@ "node": ">= 0.8" } }, - "node_modules/dequal": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", - "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/diff-match-patch": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz", - "integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==", - "dev": true, - "license": "Apache-2.0" - }, "node_modules/dotenv": { "version": "16.5.0", "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.5.0.tgz", @@ -2164,9 +2228,9 @@ } }, "node_modules/es-module-lexer": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz", - "integrity": "sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", "dev": true, "license": "MIT" }, @@ -2271,9 +2335,9 @@ } }, "node_modules/eventsource-parser": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.1.tgz", - "integrity": "sha512-VARTJ9CYeuQYb0pZEPbzi740OWFgpHe7AYJ2WFZVnUDUQp5Dk2yJUgF36YsZ81cOyxT0QxmXD2EQpapAouzWVA==", + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.6.tgz", + "integrity": "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -2350,15 +2414,23 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true, + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "license": "MIT" }, "node_modules/fdir": { - "version": "6.4.3", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.3.tgz", - "integrity": "sha512-PMXmW2y1hDDfTSRc9gaXIuCCRpuoz3Kaz8cUelp3smouvfT632ozg2vrT6lJsHKKOF59YLbOGfAWGUcKEfRMQw==", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", "dev": true, "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, "peerDependencies": { "picomatch": "^3 || ^4" }, @@ -2763,24 +2835,6 @@ "dev": true, "license": "MIT" }, - "node_modules/jsondiffpatch": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/jsondiffpatch/-/jsondiffpatch-0.6.0.tgz", - "integrity": "sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/diff-match-patch": "^1.0.36", - "chalk": "^5.3.0", - "diff-match-patch": "^1.0.5" - }, - "bin": { - "jsondiffpatch": "bin/jsondiffpatch.js" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" - } - }, "node_modules/lilconfig": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", @@ -3170,9 +3224,9 @@ "license": "MIT" }, "node_modules/pathe": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", - "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", "dev": true, "license": "MIT" }, @@ -3194,9 +3248,9 @@ "license": "ISC" }, "node_modules/picomatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", - "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", "engines": { @@ -3236,9 +3290,9 @@ } }, "node_modules/postcss": { - "version": "8.5.3", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz", - "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==", + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", "dev": true, "funding": [ { @@ -3256,7 +3310,7 @@ ], "license": "MIT", "dependencies": { - "nanoid": "^3.3.8", + "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" }, @@ -3372,7 +3426,6 @@ "version": "2.3.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "dev": true, "license": "MIT", "engines": { "node": ">=6" @@ -3424,17 +3477,6 @@ "node": ">= 0.8" } }, - "node_modules/react": { - "version": "19.1.0", - "resolved": "https://registry.npmjs.org/react/-/react-19.1.0.tgz", - "integrity": "sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg==", - "dev": true, - "license": "MIT", - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/readdirp": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", @@ -3497,13 +3539,13 @@ } }, "node_modules/rollup": { - "version": "4.39.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.39.0.tgz", - "integrity": "sha512-thI8kNc02yNvnmJp8dr3fNWJ9tCONDhp6TV35X6HkKGGs9E6q7YWCHbe5vKiTa7TAiNcFEmXKj3X/pG2b3ci0g==", + "version": "4.50.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.50.1.tgz", + "integrity": "sha512-78E9voJHwnXQMiQdiqswVLZwJIzdBKJ1GdI5Zx6XwoFKUIk09/sSrr+05QFzvYb8q6Y9pPV45zzDuYa3907TZA==", "dev": true, "license": "MIT", "dependencies": { - "@types/estree": "1.0.7" + "@types/estree": "1.0.8" }, "bin": { "rollup": "dist/bin/rollup" @@ -3513,26 +3555,27 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.39.0", - "@rollup/rollup-android-arm64": "4.39.0", - "@rollup/rollup-darwin-arm64": "4.39.0", - "@rollup/rollup-darwin-x64": "4.39.0", - "@rollup/rollup-freebsd-arm64": "4.39.0", - "@rollup/rollup-freebsd-x64": "4.39.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.39.0", - "@rollup/rollup-linux-arm-musleabihf": "4.39.0", - "@rollup/rollup-linux-arm64-gnu": "4.39.0", - "@rollup/rollup-linux-arm64-musl": "4.39.0", - "@rollup/rollup-linux-loongarch64-gnu": "4.39.0", - "@rollup/rollup-linux-powerpc64le-gnu": "4.39.0", - "@rollup/rollup-linux-riscv64-gnu": "4.39.0", - "@rollup/rollup-linux-riscv64-musl": "4.39.0", - "@rollup/rollup-linux-s390x-gnu": "4.39.0", - "@rollup/rollup-linux-x64-gnu": "4.39.0", - "@rollup/rollup-linux-x64-musl": "4.39.0", - "@rollup/rollup-win32-arm64-msvc": "4.39.0", - "@rollup/rollup-win32-ia32-msvc": "4.39.0", - "@rollup/rollup-win32-x64-msvc": "4.39.0", + "@rollup/rollup-android-arm-eabi": "4.50.1", + "@rollup/rollup-android-arm64": "4.50.1", + "@rollup/rollup-darwin-arm64": "4.50.1", + "@rollup/rollup-darwin-x64": "4.50.1", + "@rollup/rollup-freebsd-arm64": "4.50.1", + "@rollup/rollup-freebsd-x64": "4.50.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.50.1", + "@rollup/rollup-linux-arm-musleabihf": "4.50.1", + "@rollup/rollup-linux-arm64-gnu": "4.50.1", + "@rollup/rollup-linux-arm64-musl": "4.50.1", + "@rollup/rollup-linux-loongarch64-gnu": "4.50.1", + "@rollup/rollup-linux-ppc64-gnu": "4.50.1", + "@rollup/rollup-linux-riscv64-gnu": "4.50.1", + "@rollup/rollup-linux-riscv64-musl": "4.50.1", + "@rollup/rollup-linux-s390x-gnu": "4.50.1", + "@rollup/rollup-linux-x64-gnu": "4.50.1", + "@rollup/rollup-linux-x64-musl": "4.50.1", + "@rollup/rollup-openharmony-arm64": "4.50.1", + "@rollup/rollup-win32-arm64-msvc": "4.50.1", + "@rollup/rollup-win32-ia32-msvc": "4.50.1", + "@rollup/rollup-win32-x64-msvc": "4.50.1", "fsevents": "~2.3.2" } }, @@ -3943,20 +3986,6 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/swr": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/swr/-/swr-2.3.3.tgz", - "integrity": "sha512-dshNvs3ExOqtZ6kJBaAsabhPdHyeY4P2cKwRCniDVifBMoG/SVI7tfLWqPXriVspf2Rg4tPzXJTnwaihIeFw2A==", - "dev": true, - "license": "MIT", - "dependencies": { - "dequal": "^2.0.3", - "use-sync-external-store": "^1.4.0" - }, - "peerDependencies": { - "react": "^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } - }, "node_modules/thenify": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", @@ -3980,19 +4009,6 @@ "node": ">=0.8" } }, - "node_modules/throttleit": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-2.1.0.tgz", - "integrity": "sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/tinybench": { "version": "2.9.0", "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", @@ -4008,14 +4024,14 @@ "license": "MIT" }, "node_modules/tinyglobby": { - "version": "0.2.12", - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.12.tgz", - "integrity": "sha512-qkf4trmKSIiMTs/E63cxH+ojC2unam7rJ0WrauAzpT3ECNTxGRMlaXxVbfxMUC/w0LaYk6jQ4y/nGR9uBO3tww==", + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", "dev": true, "license": "MIT", "dependencies": { - "fdir": "^6.4.3", - "picomatch": "^4.0.2" + "fdir": "^6.5.0", + "picomatch": "^4.0.3" }, "engines": { "node": ">=12.0.0" @@ -4025,9 +4041,9 @@ } }, "node_modules/tinypool": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz", - "integrity": "sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.0.tgz", + "integrity": "sha512-7CotroY9a8DKsKprEy/a14aCCm8jYVmR7aFy4fpkZM8sdpNJbKkixuNjgM50yCmip2ezc8z4N7k3oe2+rfRJCQ==", "dev": true, "license": "MIT", "engines": { @@ -4035,9 +4051,9 @@ } }, "node_modules/tinyrainbow": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-1.2.0.tgz", - "integrity": "sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", "dev": true, "license": "MIT", "engines": { @@ -4045,9 +4061,9 @@ } }, "node_modules/tinyspy": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", - "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", + "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", "dev": true, "license": "MIT", "engines": { @@ -4245,6 +4261,15 @@ "node": ">= 0.8" } }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, "node_modules/uri-js-replace": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/uri-js-replace/-/uri-js-replace-1.0.1.tgz", @@ -4263,16 +4288,6 @@ "requires-port": "^1.0.0" } }, - "node_modules/use-sync-external-store": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.5.0.tgz", - "integrity": "sha512-Rb46I4cGGVBmjamjphe8L/UnvJD+uPPtTkNvX5mZgqdbavhI4EbgIWJiIHXJ8bc/i9EQGPRh4DwEURJ552Do0A==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } - }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", @@ -4283,21 +4298,24 @@ } }, "node_modules/vite": { - "version": "5.4.18", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.18.tgz", - "integrity": "sha512-1oDcnEp3lVyHCuQ2YFelM4Alm2o91xNoMncRm1U7S+JdYfYOvbiGZ3/CxGttrOu2M/KcGz7cRC2DoNUA6urmMA==", + "version": "7.1.11", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.11.tgz", + "integrity": "sha512-uzcxnSDVjAopEUjljkWh8EIrg6tlzrjFUfMcR1EVsRDGwf/ccef0qQPRyOrROwhrTDaApueq+ja+KLPlzR/zdg==", "dev": true, "license": "MIT", "dependencies": { - "esbuild": "^0.21.3", - "postcss": "^8.4.43", - "rollup": "^4.20.0" + "esbuild": "^0.25.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" }, "bin": { "vite": "bin/vite.js" }, "engines": { - "node": "^18.0.0 || >=20.0.0" + "node": "^20.19.0 || >=22.12.0" }, "funding": { "url": "https://github.com/vitejs/vite?sponsor=1" @@ -4306,19 +4324,25 @@ "fsevents": "~2.3.3" }, "peerDependencies": { - "@types/node": "^18.0.0 || >=20.0.0", - "less": "*", + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - "sugarss": "*", - "terser": "^5.4.0" + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" }, "peerDependenciesMeta": { "@types/node": { "optional": true }, + "jiti": { + "optional": true + }, "less": { "optional": true }, @@ -4339,504 +4363,84 @@ }, "terser": { "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true } } }, "node_modules/vite-node": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-2.1.9.tgz", - "integrity": "sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.2.tgz", + "integrity": "sha512-Xj/jovjZvDXOq2FgLXu8NsY4uHUMWtzVmMC2LkCu9HWdr9Qu1Is5sanX3Z4jOFKdohfaWDnEJWp9pRP0vVpAcA==", "dev": true, "license": "MIT", "dependencies": { "cac": "^6.7.14", - "debug": "^4.3.7", - "es-module-lexer": "^1.5.4", - "pathe": "^1.1.2", - "vite": "^5.0.0" + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", + "pathe": "^2.0.3", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { "vite-node": "vite-node.mjs" }, "engines": { - "node": "^18.0.0 || >=20.0.0" + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, - "node_modules/vite/node_modules/@esbuild/aix-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", - "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/android-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", - "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], + "node_modules/vitest": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.2.tgz", + "integrity": "sha512-fyNn/Rp016Bt5qvY0OQvIUCwW2vnaEBLxP42PmKbNIoasSYjML+8xyeADOPvBe+Xfl/ubIw4og7Lt9jflRsCNw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.2", + "@vitest/mocker": "3.2.2", + "@vitest/pretty-format": "^3.2.2", + "@vitest/runner": "3.2.2", + "@vitest/snapshot": "3.2.2", + "@vitest/spy": "3.2.2", + "@vitest/utils": "3.2.2", + "chai": "^5.2.0", + "debug": "^4.4.1", + "expect-type": "^1.2.1", + "magic-string": "^0.30.17", + "pathe": "^2.0.3", + "picomatch": "^4.0.2", + "std-env": "^3.9.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.2", + "tinyglobby": "^0.2.14", + "tinypool": "^1.1.0", + "tinyrainbow": "^2.0.0", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.2", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/android-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", - "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/android-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", - "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/darwin-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", - "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/darwin-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", - "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", - "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/freebsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", - "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", - "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", - "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", - "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-loong64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", - "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-mips64el": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", - "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", - "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-riscv64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", - "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-s390x": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", - "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", - "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/netbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", - "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/openbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", - "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/sunos-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", - "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/win32-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", - "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/win32-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", - "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/@esbuild/win32-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", - "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/vite/node_modules/esbuild": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", - "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=12" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.21.5", - "@esbuild/android-arm": "0.21.5", - "@esbuild/android-arm64": "0.21.5", - "@esbuild/android-x64": "0.21.5", - "@esbuild/darwin-arm64": "0.21.5", - "@esbuild/darwin-x64": "0.21.5", - "@esbuild/freebsd-arm64": "0.21.5", - "@esbuild/freebsd-x64": "0.21.5", - "@esbuild/linux-arm": "0.21.5", - "@esbuild/linux-arm64": "0.21.5", - "@esbuild/linux-ia32": "0.21.5", - "@esbuild/linux-loong64": "0.21.5", - "@esbuild/linux-mips64el": "0.21.5", - "@esbuild/linux-ppc64": "0.21.5", - "@esbuild/linux-riscv64": "0.21.5", - "@esbuild/linux-s390x": "0.21.5", - "@esbuild/linux-x64": "0.21.5", - "@esbuild/netbsd-x64": "0.21.5", - "@esbuild/openbsd-x64": "0.21.5", - "@esbuild/sunos-x64": "0.21.5", - "@esbuild/win32-arm64": "0.21.5", - "@esbuild/win32-ia32": "0.21.5", - "@esbuild/win32-x64": "0.21.5" - } - }, - "node_modules/vitest": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-2.1.9.tgz", - "integrity": "sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/expect": "2.1.9", - "@vitest/mocker": "2.1.9", - "@vitest/pretty-format": "^2.1.9", - "@vitest/runner": "2.1.9", - "@vitest/snapshot": "2.1.9", - "@vitest/spy": "2.1.9", - "@vitest/utils": "2.1.9", - "chai": "^5.1.2", - "debug": "^4.3.7", - "expect-type": "^1.1.0", - "magic-string": "^0.30.12", - "pathe": "^1.1.2", - "std-env": "^3.8.0", - "tinybench": "^2.9.0", - "tinyexec": "^0.3.1", - "tinypool": "^1.0.1", - "tinyrainbow": "^1.2.0", - "vite": "^5.0.0", - "vite-node": "2.1.9", - "why-is-node-running": "^2.3.0" - }, - "bin": { - "vitest": "vitest.mjs" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" }, "funding": { "url": "https://opencollective.com/vitest" }, "peerDependencies": { "@edge-runtime/vm": "*", - "@types/node": "^18.0.0 || >=20.0.0", - "@vitest/browser": "2.1.9", - "@vitest/ui": "2.1.9", + "@types/debug": "^4.1.12", + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@vitest/browser": "3.2.2", + "@vitest/ui": "3.2.2", "happy-dom": "*", "jsdom": "*" }, @@ -4844,6 +4448,9 @@ "@edge-runtime/vm": { "optional": true }, + "@types/debug": { + "optional": true + }, "@types/node": { "optional": true }, @@ -5147,9 +4754,9 @@ } }, "node_modules/zod": { - "version": "3.24.2", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.2.tgz", - "integrity": "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ==", + "version": "3.25.76", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", + "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/colinhacks" diff --git a/mcp_servers/supabase/package.json b/mcp_servers/supabase/package.json index 5ad1318b..d76e585e 100644 --- a/mcp_servers/supabase/package.json +++ b/mcp_servers/supabase/package.json @@ -1,14 +1,15 @@ { - "name": "@supabase/mcp-server-supabase", + "name": "@klavis-ai/mcp-server-supabase", "version": "0.3.5", "description": "MCP server for interacting with Supabase", "license": "Apache-2.0", "type": "module", - "main": "dist/index.cjs", - "types": "dist/index.d.ts", + "main": "dist/mcp.cjs", + "types": "dist/mcp.d.ts", "sideEffects": false, "scripts": { "build": "tsup --clean", + "start": "node dist/mcp.cjs", "prepublishOnly": "npm run build", "test": "vitest", "test:e2e": "vitest --project e2e", @@ -19,17 +20,17 @@ "dist/**/*" ], "bin": { - "supabase": "./dist/sse.js" + "supabase-mcp": "./dist/mcp.js" }, "exports": { ".": { - "import": "./dist/index.js", - "types": "./dist/index.d.ts", - "default": "./dist/index.cjs" + "import": "./dist/mcp.js", + "types": "./dist/mcp.d.ts", + "default": "./dist/mcp.cjs" } }, "dependencies": { - "@modelcontextprotocol/sdk": "^1.4.1", + "@modelcontextprotocol/sdk": "^1.12.1", "@supabase/mcp-utils": "0.1.3", "common-tags": "^1.8.2", "openapi-fetch": "^0.13.4", @@ -43,7 +44,7 @@ "@types/common-tags": "^1.8.4", "@types/node": "^22.8.6", "@types/express": "^5.0.0", - "ai": "^4.3.4", + "ai": "^5.0.76", "date-fns": "^4.1.0", "dotenv": "^16.5.0", "msw": "^2.7.3", @@ -54,6 +55,6 @@ "tsup": "^8.3.5", "tsx": "^4.19.2", "typescript": "^5.6.3", - "vitest": "^2.1.9" + "vitest": "^3.2.2" } } diff --git a/mcp_servers/supabase/src/index.ts b/mcp_servers/supabase/src/index.ts deleted file mode 100644 index 3693f485..00000000 --- a/mcp_servers/supabase/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './server.js'; diff --git a/mcp_servers/supabase/src/mcp.ts b/mcp_servers/supabase/src/mcp.ts new file mode 100644 index 00000000..72e173b7 --- /dev/null +++ b/mcp_servers/supabase/src/mcp.ts @@ -0,0 +1,137 @@ +#!/usr/bin/env node + +import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js'; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; +import { createSupabaseMcpServer, asyncLocalStorage } from './server.js'; +import express, { type Request } from 'express'; +import * as dotenv from 'dotenv'; + +// Load environment variables +dotenv.config(); + +function extractAccessToken(req: Request): string { + let authData = process.env.AUTH_DATA; + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: Supabase access token is missing. Provide it via AUTH_DATA env var or x-auth-data header with access_token field.'); + return ''; + } + + const authDataJson = JSON.parse(authData); + return authDataJson.access_token ?? ''; +} + +const getSupabaseMcpServer = () => { + const server = createSupabaseMcpServer({ + platform: {}, + readOnly: false, + }); + return server; +} + +const app = express(); + + +//============================================================================= +// STREAMABLE HTTP TRANSPORT (PROTOCOL VERSION 2025-03-26) +//============================================================================= + +app.post('/mcp', async (req, res) => { + const accessToken = extractAccessToken(req); + + const server = getSupabaseMcpServer(); + try { + const transport: StreamableHTTPServerTransport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + await server.connect(transport); + asyncLocalStorage.run({ accessToken }, async () => { + await transport.handleRequest(req, res, req.body); + }); + res.on('close', () => { + console.log('Request closed'); + transport.close(); + server.close(); + }); + } catch (error) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', + }, + id: null, + }); + } + } +}); + +app.get('/mcp', async (req, res) => { + console.log('Received GET MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +app.delete('/mcp', async (req, res) => { + console.log('Received DELETE MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +//============================================================================= +// DEPRECATED HTTP+SSE TRANSPORT (PROTOCOL VERSION 2024-11-05) +//============================================================================= + +// to support multiple simultaneous connections we have a lookup object from +// sessionId to transport +const transports: { [sessionId: string]: SSEServerTransport } = {}; + +app.get("/sse", async (req, res) => { + const transport = new SSEServerTransport('/messages', res); + transports[transport.sessionId] = transport; + res.on("close", () => { + delete transports[transport.sessionId]; + }); + const server = getSupabaseMcpServer(); + await server.connect(transport); +}); + +app.post("/messages", async (req, res) => { + const sessionId = req.query.sessionId as string; + const transport = transports[sessionId]; + if (transport) { + const accessToken = extractAccessToken(req); + asyncLocalStorage.run({ accessToken }, async () => { + await transport.handlePostMessage(req, res); + }); + } else { + res.status(400).send('No transport found for sessionId'); + } +}); + +const PORT = 5000; +app.listen(PORT, () => { + console.log(`Supabase MCP Server running on port ${PORT}`); +}); diff --git a/mcp_servers/supabase/src/server.test.ts b/mcp_servers/supabase/src/server.test.ts index a757a43e..6378ffe2 100644 --- a/mcp_servers/supabase/src/server.test.ts +++ b/mcp_servers/supabase/src/server.test.ts @@ -118,7 +118,7 @@ describe('tools', () => { }); const result = await callTool({ - name: 'list_organizations', + name: 'supabase_list_organizations', arguments: {}, }); @@ -138,7 +138,7 @@ describe('tools', () => { }); const result = await callTool({ - name: 'get_organization', + name: 'supabase_get_organization', arguments: { id: org.id, }, @@ -157,7 +157,7 @@ describe('tools', () => { }); const result = await callTool({ - name: 'get_cost', + name: 'supabase_get_cost', arguments: { type: 'project', organization_id: freeOrg.id, @@ -179,7 +179,7 @@ describe('tools', () => { }); const result = await callTool({ - name: 'get_cost', + name: 'supabase_get_cost', arguments: { type: 'project', organization_id: paidOrg.id, @@ -208,7 +208,7 @@ describe('tools', () => { priorProject.status = 'ACTIVE_HEALTHY'; const result = await callTool({ - name: 'get_cost', + name: 'supabase_get_cost', arguments: { type: 'project', organization_id: paidOrg.id, @@ -237,7 +237,7 @@ describe('tools', () => { priorProject.status = 'INACTIVE'; const result = await callTool({ - name: 'get_cost', + name: 'supabase_get_cost', arguments: { type: 'project', organization_id: paidOrg.id, @@ -259,7 +259,7 @@ describe('tools', () => { }); const result = await callTool({ - name: 'get_cost', + name: 'supabase_get_cost', arguments: { type: 'branch', organization_id: paidOrg.id, @@ -293,7 +293,7 @@ describe('tools', () => { }); const result = await callTool({ - name: 'list_projects', + name: 'supabase_list_projects', arguments: {}, }); @@ -316,7 +316,7 @@ describe('tools', () => { }); const result = await callTool({ - name: 'get_project', + name: 'supabase_get_project', arguments: { id: project.id, }, @@ -352,7 +352,7 @@ describe('tools', () => { }; const result = await callTool({ - name: 'create_project', + name: 'supabase_create_project', arguments: newProject, }); @@ -378,7 +378,7 @@ describe('tools', () => { }); const confirm_cost_id = await callTool({ - name: 'confirm_cost', + name: 'supabase_confirm_cost', arguments: { type: 'project', recurrence: 'monthly', @@ -394,7 +394,7 @@ describe('tools', () => { }; const result = await callTool({ - name: 'create_project', + name: 'supabase_create_project', arguments: newProject, }); @@ -428,7 +428,7 @@ describe('tools', () => { }; const createProjectPromise = callTool({ - name: 'create_project', + name: 'supabase_create_project', arguments: newProject, }); @@ -454,7 +454,7 @@ describe('tools', () => { project.status = 'ACTIVE_HEALTHY'; await callTool({ - name: 'pause_project', + name: 'supabase_pause_project', arguments: { project_id: project.id, }, @@ -480,7 +480,7 @@ describe('tools', () => { project.status = 'INACTIVE'; await callTool({ - name: 'restore_project', + name: 'supabase_restore_project', arguments: { project_id: project.id, }, @@ -506,7 +506,7 @@ describe('tools', () => { project.status = 'ACTIVE_HEALTHY'; const result = await callTool({ - name: 'get_project_url', + name: 'supabase_get_project_url', arguments: { project_id: project.id, }, @@ -529,7 +529,7 @@ describe('tools', () => { project.status = 'ACTIVE_HEALTHY'; const result = await callTool({ - name: 'get_anon_key', + name: 'supabase_get_anon_key', arguments: { project_id: project.id, }, @@ -556,7 +556,7 @@ describe('tools', () => { const query = 'select 1+1 as sum'; const result = await callTool({ - name: 'execute_sql', + name: 'supabase_execute_sql', arguments: { project_id: project.id, query, @@ -585,7 +585,7 @@ describe('tools', () => { const query = 'select 1+1 as sum'; const result = await callTool({ - name: 'execute_sql', + name: 'supabase_execute_sql', arguments: { project_id: project.id, query, @@ -615,7 +615,7 @@ describe('tools', () => { 'create table test (id integer generated always as identity primary key)'; const resultPromise = callTool({ - name: 'execute_sql', + name: 'supabase_execute_sql', arguments: { project_id: project.id, query, @@ -648,7 +648,7 @@ describe('tools', () => { 'create table test (id integer generated always as identity primary key)'; const result = await callTool({ - name: 'apply_migration', + name: 'supabase_apply_migration', arguments: { project_id: project.id, name, @@ -659,7 +659,7 @@ describe('tools', () => { expect(result).toEqual([]); const listMigrationsResult = await callTool({ - name: 'list_migrations', + name: 'supabase_list_migrations', arguments: { project_id: project.id, }, @@ -673,7 +673,7 @@ describe('tools', () => { ]); const listTablesResult = await callTool({ - name: 'list_tables', + name: 'supabase_list_tables', arguments: { project_id: project.id, schemas: ['public'], @@ -749,7 +749,7 @@ describe('tools', () => { 'create table test (id integer generated always as identity primary key)'; const resultPromise = callTool({ - name: 'apply_migration', + name: 'supabase_apply_migration', arguments: { project_id: project.id, name, @@ -785,7 +785,7 @@ describe('tools', () => { await project.db.exec('create table test.test_2 (id serial primary key);'); const result = await callTool({ - name: 'list_tables', + name: 'supabase_list_tables', arguments: { project_id: project.id, schemas: ['test'], @@ -817,7 +817,7 @@ describe('tools', () => { project.status = 'ACTIVE_HEALTHY'; const result = await callTool({ - name: 'list_tables', + name: 'supabase_list_tables', arguments: { project_id: project.id, }, @@ -857,7 +857,7 @@ describe('tools', () => { project.status = 'ACTIVE_HEALTHY'; const result = await callTool({ - name: 'list_extensions', + name: 'supabase_list_extensions', arguments: { project_id: project.id, }, @@ -880,7 +880,7 @@ describe('tools', () => { const { callTool } = await setup({ accessToken: 'bad-token' }); const listOrganizationsPromise = callTool({ - name: 'list_organizations', + name: 'supabase_list_organizations', arguments: {}, }); @@ -909,7 +909,7 @@ describe('tools', () => { const query = 'invalid sql'; const applyMigrationPromise = callTool({ - name: 'apply_migration', + name: 'supabase_apply_migration', arguments: { project_id: project.id, name, @@ -941,7 +941,7 @@ describe('tools', () => { const query = 'invalid sql'; const executeSqlPromise = callTool({ - name: 'execute_sql', + name: 'supabase_execute_sql', arguments: { project_id: project.id, query, @@ -981,7 +981,7 @@ describe('tools', () => { for (const service of services) { const result = await callTool({ - name: 'get_logs', + name: 'supabase_get_logs', arguments: { project_id: project.id, service, @@ -1010,7 +1010,7 @@ describe('tools', () => { const invalidService = 'invalid-service'; const getLogsPromise = callTool({ - name: 'get_logs', + name: 'supabase_get_logs', arguments: { project_id: project.id, service: invalidService, @@ -1036,7 +1036,7 @@ describe('tools', () => { project.status = 'ACTIVE_HEALTHY'; const confirm_cost_id = await callTool({ - name: 'confirm_cost', + name: 'supabase_confirm_cost', arguments: { type: 'branch', recurrence: 'hourly', @@ -1046,7 +1046,7 @@ describe('tools', () => { const branchName = 'test-branch'; const result = await callTool({ - name: 'create_branch', + name: 'supabase_create_branch', arguments: { project_id: project.id, name: branchName, @@ -1089,7 +1089,7 @@ describe('tools', () => { const branchName = 'test-branch'; const createBranchPromise = callTool({ - name: 'create_branch', + name: 'supabase_create_branch', arguments: { project_id: project.id, name: branchName, @@ -1118,7 +1118,7 @@ describe('tools', () => { project.status = 'ACTIVE_HEALTHY'; const confirm_cost_id = await callTool({ - name: 'confirm_cost', + name: 'supabase_confirm_cost', arguments: { type: 'branch', recurrence: 'hourly', @@ -1127,7 +1127,7 @@ describe('tools', () => { }); const branch = await callTool({ - name: 'create_branch', + name: 'supabase_create_branch', arguments: { project_id: project.id, name: 'test-branch', @@ -1136,7 +1136,7 @@ describe('tools', () => { }); const listBranchesResult = await callTool({ - name: 'list_branches', + name: 'supabase_list_branches', arguments: { project_id: project.id, }, @@ -1148,14 +1148,14 @@ describe('tools', () => { expect(listBranchesResult).toHaveLength(2); await callTool({ - name: 'delete_branch', + name: 'supabase_delete_branch', arguments: { branch_id: branch.id, }, }); const listBranchesResultAfterDelete = await callTool({ - name: 'list_branches', + name: 'supabase_list_branches', arguments: { project_id: project.id, }, @@ -1169,7 +1169,7 @@ describe('tools', () => { const mainBranch = listBranchesResultAfterDelete[0]; const deleteBranchPromise = callTool({ - name: 'delete_branch', + name: 'supabase_delete_branch', arguments: { branch_id: mainBranch.id, }, @@ -1197,7 +1197,7 @@ describe('tools', () => { project.status = 'ACTIVE_HEALTHY'; const result = await callTool({ - name: 'list_branches', + name: 'supabase_list_branches', arguments: { project_id: project.id, }, @@ -1223,7 +1223,7 @@ describe('tools', () => { project.status = 'ACTIVE_HEALTHY'; const confirm_cost_id = await callTool({ - name: 'confirm_cost', + name: 'supabase_confirm_cost', arguments: { type: 'branch', recurrence: 'hourly', @@ -1232,7 +1232,7 @@ describe('tools', () => { }); const branch = await callTool({ - name: 'create_branch', + name: 'supabase_create_branch', arguments: { project_id: project.id, name: 'test-branch', @@ -1244,7 +1244,7 @@ describe('tools', () => { const migrationQuery = 'create table sample (id integer generated always as identity primary key)'; await callTool({ - name: 'apply_migration', + name: 'supabase_apply_migration', arguments: { project_id: branch.project_ref, name: migrationName, @@ -1253,7 +1253,7 @@ describe('tools', () => { }); const mergeResult = await callTool({ - name: 'merge_branch', + name: 'supabase_merge_branch', arguments: { branch_id: branch.id, }, @@ -1265,7 +1265,7 @@ describe('tools', () => { // Check that the migration was applied to the parent project const listResult = await callTool({ - name: 'list_migrations', + name: 'supabase_list_migrations', arguments: { project_id: project.id, }, @@ -1294,7 +1294,7 @@ describe('tools', () => { project.status = 'ACTIVE_HEALTHY'; const confirm_cost_id = await callTool({ - name: 'confirm_cost', + name: 'supabase_confirm_cost', arguments: { type: 'branch', recurrence: 'hourly', @@ -1303,7 +1303,7 @@ describe('tools', () => { }); const branch = await callTool({ - name: 'create_branch', + name: 'supabase_create_branch', arguments: { project_id: project.id, name: 'test-branch', @@ -1315,7 +1315,7 @@ describe('tools', () => { const query = 'create table test_untracked (id integer generated always as identity primary key)'; await callTool({ - name: 'execute_sql', + name: 'supabase_execute_sql', arguments: { project_id: branch.project_ref, query, @@ -1323,7 +1323,7 @@ describe('tools', () => { }); const firstTablesResult = await callTool({ - name: 'list_tables', + name: 'supabase_list_tables', arguments: { project_id: branch.project_ref, }, @@ -1334,14 +1334,14 @@ describe('tools', () => { ); await callTool({ - name: 'reset_branch', + name: 'supabase_reset_branch', arguments: { branch_id: branch.id, }, }); const secondTablesResult = await callTool({ - name: 'list_tables', + name: 'supabase_list_tables', arguments: { project_id: branch.project_ref, }, @@ -1370,7 +1370,7 @@ describe('tools', () => { project.status = 'ACTIVE_HEALTHY'; const confirm_cost_id = await callTool({ - name: 'confirm_cost', + name: 'supabase_confirm_cost', arguments: { type: 'branch', recurrence: 'hourly', @@ -1379,7 +1379,7 @@ describe('tools', () => { }); const branch = await callTool({ - name: 'create_branch', + name: 'supabase_create_branch', arguments: { project_id: project.id, name: 'test-branch', @@ -1391,7 +1391,7 @@ describe('tools', () => { const migrationQuery = 'create table sample (id integer generated always as identity primary key)'; await callTool({ - name: 'apply_migration', + name: 'supabase_apply_migration', arguments: { project_id: branch.project_ref, name: migrationName, @@ -1401,7 +1401,7 @@ describe('tools', () => { // Check that migration has been applied to the branch const firstListResult = await callTool({ - name: 'list_migrations', + name: 'supabase_list_migrations', arguments: { project_id: branch.project_ref, }, @@ -1413,7 +1413,7 @@ describe('tools', () => { }); const firstTablesResult = await callTool({ - name: 'list_tables', + name: 'supabase_list_tables', arguments: { project_id: branch.project_ref, }, @@ -1424,7 +1424,7 @@ describe('tools', () => { ); await callTool({ - name: 'reset_branch', + name: 'supabase_reset_branch', arguments: { branch_id: branch.id, migration_version: '0', @@ -1433,7 +1433,7 @@ describe('tools', () => { // Check that all migrations have been reverted const secondListResult = await callTool({ - name: 'list_migrations', + name: 'supabase_list_migrations', arguments: { project_id: branch.project_ref, }, @@ -1442,7 +1442,7 @@ describe('tools', () => { expect(secondListResult).toStrictEqual([]); const secondTablesResult = await callTool({ - name: 'list_tables', + name: 'supabase_list_tables', arguments: { project_id: branch.project_ref, }, @@ -1470,7 +1470,7 @@ describe('tools', () => { project.status = 'ACTIVE_HEALTHY'; const confirm_cost_id = await callTool({ - name: 'confirm_cost', + name: 'supabase_confirm_cost', arguments: { type: 'branch', recurrence: 'hourly', @@ -1479,7 +1479,7 @@ describe('tools', () => { }); const branch = await callTool({ - name: 'create_branch', + name: 'supabase_create_branch', arguments: { project_id: project.id, name: 'test-branch', @@ -1491,7 +1491,7 @@ describe('tools', () => { const migrationQuery = 'create table sample (id integer generated always as identity primary key)'; await callTool({ - name: 'apply_migration', + name: 'supabase_apply_migration', arguments: { project_id: project.id, name: migrationName, @@ -1500,7 +1500,7 @@ describe('tools', () => { }); const rebaseResult = await callTool({ - name: 'rebase_branch', + name: 'supabase_rebase_branch', arguments: { branch_id: branch.id, }, @@ -1512,7 +1512,7 @@ describe('tools', () => { // Check that the production migration was applied to the branch const listResult = await callTool({ - name: 'list_migrations', + name: 'supabase_list_migrations', arguments: { project_id: branch.project_ref, }, diff --git a/mcp_servers/supabase/src/server.ts b/mcp_servers/supabase/src/server.ts index 125f7efa..8820bdbd 100644 --- a/mcp_servers/supabase/src/server.ts +++ b/mcp_servers/supabase/src/server.ts @@ -20,24 +20,34 @@ import { } from './regions.js'; import { hashObject } from './util.js'; import { AsyncLocalStorage } from 'async_hooks'; +import { ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js'; +import { zodToJsonSchema } from 'zod-to-json-schema'; export const asyncLocalStorage = new AsyncLocalStorage<{ - managementApiClient: ManagementApiClient; + accessToken: string; }>(); -function getManagementApiClient() { - return asyncLocalStorage.getStore()!.managementApiClient; -} - -export function setManagementApiClient(accessToken: string) { - const managementApiClient = createManagementApiClient( +function getManagementApiClient(): ManagementApiClient { + const store = asyncLocalStorage.getStore(); + if (!store) { + throw new Error('Access token not found in AsyncLocalStorage'); + } + + return createManagementApiClient( 'https://api.supabase.com/', - accessToken, + store.accessToken, { 'User-Agent': `supabase-mcp/${version}`, } ); - return managementApiClient; +} + +export function getAccessToken(): string { + const store = asyncLocalStorage.getStore(); + if (!store) { + throw new Error('Access token not found in AsyncLocalStorage'); + } + return store.accessToken; } export type SupabasePlatformOptions = { @@ -93,7 +103,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { // Note: tools are intentionally snake_case to align better with most MCP clients tools: { - list_projects: tool({ + supabase_list_projects: tool({ description: 'Lists all Supabase projects for the user.', parameters: z.object({}), execute: async () => { @@ -104,7 +114,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return response.data; }, }), - get_project: tool({ + supabase_get_project: tool({ description: 'Gets details for a Supabase project.', parameters: z.object({ id: z.string().describe('The project ID'), @@ -121,7 +131,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return response.data; }, }), - get_cost: tool({ + supabase_get_cost: tool({ description: 'Gets the cost of creating a new project or branch. Never assume organization as costs can be different for each.', parameters: z.object({ @@ -151,7 +161,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { } }, }), - confirm_cost: tool({ + supabase_confirm_cost: tool({ description: 'Ask the user to confirm their understanding of the cost of creating a new project or branch. Call `get_cost` first. Returns a unique ID for this confirmation which should be passed to `create_project` or `create_branch`.', parameters: z.object({ @@ -163,7 +173,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return await hashObject(cost); }, }), - create_project: tool({ + supabase_create_project: tool({ description: 'Creates a new Supabase project. Always ask the user which organization to create the project in. The project can take a few minutes to initialize - use `get_project` to check the status.', parameters: z.object({ @@ -214,7 +224,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return response.data; }, }), - pause_project: tool({ + supabase_pause_project: tool({ description: 'Pauses a Supabase project.', parameters: z.object({ project_id: z.string(), @@ -234,7 +244,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { assertSuccess(response, 'Failed to pause project'); }, }), - restore_project: tool({ + supabase_restore_project: tool({ description: 'Restores a Supabase project.', parameters: z.object({ project_id: z.string(), @@ -255,7 +265,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { assertSuccess(response, 'Failed to restore project'); }, }), - list_organizations: tool({ + supabase_list_organizations: tool({ description: 'Lists all organizations that the user is a member of.', parameters: z.object({}), execute: async () => { @@ -266,7 +276,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return response.data; }, }), - get_organization: tool({ + supabase_get_organization: tool({ description: 'Gets details for an organization. Includes subscription plan.', parameters: z.object({ @@ -289,7 +299,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return response.data; }, }), - list_tables: tool({ + supabase_list_tables: tool({ description: 'Lists all tables in a schema.', parameters: z.object({ project_id: z.string(), @@ -305,7 +315,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return data; }, }), - list_extensions: tool({ + supabase_list_extensions: tool({ description: 'Lists all extensions in the database.', parameters: z.object({ project_id: z.string(), @@ -316,7 +326,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return data; }, }), - list_migrations: tool({ + supabase_list_migrations: tool({ description: 'Lists all migrations in the database.', parameters: z.object({ project_id: z.string(), @@ -338,7 +348,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return response.data; }, }), - apply_migration: tool({ + supabase_apply_migration: tool({ description: 'Applies a migration to the database. Use this when executing DDL operations.', parameters: z.object({ @@ -371,7 +381,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return response.data; }, }), - execute_sql: tool({ + supabase_execute_sql: tool({ description: 'Executes raw SQL in the Postgres database. Use `apply_migration` instead for DDL operations.', parameters: z.object({ @@ -382,7 +392,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return await executeSql(project_id, query); }, }), - get_logs: tool({ + supabase_get_logs: tool({ description: 'Gets logs for a Supabase project by service type. Use this to help debug problems with your app. This will only return logs within the last minute. If the logs you are looking for are older than 1 minute, re-run your test to reproduce them.', parameters: z.object({ @@ -428,7 +438,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { }, }), - get_project_url: tool({ + supabase_get_project_url: tool({ description: 'Gets the API URL for a project.', parameters: z.object({ project_id: z.string(), @@ -437,7 +447,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return `https://${project_id}.supabase.co`; }, }), - get_anon_key: tool({ + supabase_get_anon_key: tool({ description: 'Gets the anonymous API key for a project.', parameters: z.object({ project_id: z.string(), @@ -468,7 +478,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return anonKey.api_key; }, }), - generate_typescript_types: tool({ + supabase_generate_typescript_types: tool({ description: 'Generates TypeScript types for a project.', parameters: z.object({ project_id: z.string(), @@ -492,7 +502,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { }), // Experimental features - create_branch: tool({ + supabase_create_branch: tool({ description: 'Creates a development branch on a Supabase project. This will apply all migrations from the main project to a fresh branch database. Note that production data will not carry over. The branch will get its own project_id via the resulting project_ref. Use this ID to execute queries and migrations on the branch.', parameters: z.object({ @@ -569,7 +579,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return createBranchResponse.data; }, }), - list_branches: tool({ + supabase_list_branches: tool({ description: 'Lists all development branches of a Supabase project. This will return branch details including status which you can use to check when operations like merge/rebase/reset complete.', parameters: z.object({ @@ -594,7 +604,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return response.data; }, }), - delete_branch: tool({ + supabase_delete_branch: tool({ description: 'Deletes a development branch.', parameters: z.object({ branch_id: z.string(), @@ -616,7 +626,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return response.data; }, }), - merge_branch: tool({ + supabase_merge_branch: tool({ description: 'Merges migrations and edge functions from a development branch to production.', parameters: z.object({ @@ -640,7 +650,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return response.data; }, }), - reset_branch: tool({ + supabase_reset_branch: tool({ description: 'Resets migrations of a development branch. Any untracked data or schema changes will be lost.', parameters: z.object({ @@ -672,7 +682,7 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { return response.data; }, }), - rebase_branch: tool({ + supabase_rebase_branch: tool({ description: 'Rebases a development branch on production. This will effectively run any newer migrations from production onto this branch to help handle migration drift.', parameters: z.object({ @@ -699,5 +709,309 @@ export function createSupabaseMcpServer(options: SupabaseMcpServerOptions) { }, }); + // Override ListTools handler to include annotations with semantic categories + server.setRequestHandler(ListToolsRequestSchema, async () => ({ + tools: [ + // PROJECT + { + name: 'supabase_list_projects', + description: 'Lists all Supabase projects for the user.', + inputSchema: zodToJsonSchema(z.object({})), + annotations: { category: 'SUPABASE_PROJECT', readOnlyHint: true }, + }, + { + name: 'supabase_get_project', + description: 'Gets details for a Supabase project.', + inputSchema: zodToJsonSchema( + z.object({ + id: z.string().describe('The project ID'), + }) + ), + annotations: { category: 'SUPABASE_PROJECT', readOnlyHint: true }, + }, + { + name: 'supabase_create_project', + description: + 'Creates a new Supabase project. Always ask the user which organization to create the project in. The project can take a few minutes to initialize - use `get_project` to check the status.', + inputSchema: zodToJsonSchema( + z.object({ + name: z.string().describe('The name of the project'), + region: z + .optional( + z + .enum(AWS_REGION_CODES) + .describe('The region to create the project in. Defaults to the closest region.') + ), + organization_id: z.string(), + confirm_cost_id: z + .string({ + required_error: + 'User must confirm understanding of costs before creating a project.', + }) + .describe('The cost confirmation ID. Call `confirm_cost` first.'), + }) + ), + annotations: { category: 'SUPABASE_PROJECT' }, + }, + { + name: 'supabase_pause_project', + description: 'Pauses a Supabase project.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + }) + ), + annotations: { category: 'SUPABASE_PROJECT' }, + }, + { + name: 'supabase_restore_project', + description: 'Restores a Supabase project.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + }) + ), + annotations: { category: 'SUPABASE_PROJECT' }, + }, + { + name: 'supabase_get_project_url', + description: 'Gets the API URL for a project.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + }) + ), + annotations: { category: 'SUPABASE_PROJECT', readOnlyHint: true }, + }, + { + name: 'supabase_get_anon_key', + description: 'Gets the anonymous API key for a project.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + }) + ), + annotations: { category: 'SUPABASE_PROJECT', readOnlyHint: true }, + }, + + // ORGANIZATION + { + name: 'supabase_list_organizations', + description: 'Lists all organizations that the user is a member of.', + inputSchema: zodToJsonSchema(z.object({})), + annotations: { category: 'SUPABASE_ORGANIZATION', readOnlyHint: true }, + }, + { + name: 'supabase_get_organization', + description: 'Gets details for an organization. Includes subscription plan.', + inputSchema: zodToJsonSchema( + z.object({ + id: z.string().describe('The organization ID'), + }) + ), + annotations: { category: 'SUPABASE_ORGANIZATION', readOnlyHint: true }, + }, + + // PRICING + { + name: 'supabase_get_cost', + description: + 'Gets the cost of creating a new project or branch. Never assume organization as costs can be different for each.', + inputSchema: zodToJsonSchema( + z.object({ + type: z.enum(['project', 'branch']), + organization_id: z.string().describe('The organization ID. Always ask the user.'), + }) + ), + annotations: { category: 'SUPABASE_PRICING', readOnlyHint: true }, + }, + { + name: 'supabase_confirm_cost', + description: + 'Ask the user to confirm their understanding of the cost of creating a new project or branch. Call `get_cost` first. Returns a unique ID for this confirmation which should be passed to `create_project` or `create_branch`.', + inputSchema: zodToJsonSchema( + z.object({ + type: z.enum(['project', 'branch']), + recurrence: z.enum(['hourly', 'monthly']), + amount: z.number(), + }) + ), + annotations: { category: 'SUPABASE_PRICING', readOnlyHint: true }, + }, + + // DATABASE + { + name: 'supabase_list_tables', + description: 'Lists all tables in a schema.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + schemas: z + .optional(z.array(z.string())) + .describe('Optional list of schemas to include. Defaults to all schemas.'), + }) + ), + annotations: { category: 'SUPABASE_DATABASE', readOnlyHint: true }, + }, + { + name: 'supabase_list_extensions', + description: 'Lists all extensions in the database.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + }) + ), + annotations: { category: 'SUPABASE_DATABASE', readOnlyHint: true }, + }, + { + name: 'supabase_list_migrations', + description: 'Lists all migrations in the database.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + }) + ), + annotations: { category: 'SUPABASE_DATABASE', readOnlyHint: true }, + }, + { + name: 'supabase_apply_migration', + description: 'Applies a migration to the database. Use this when executing DDL operations.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + name: z.string().describe('The name of the migration in snake_case'), + query: z.string().describe('The SQL query to apply'), + }) + ), + annotations: { category: 'SUPABASE_DATABASE' }, + }, + { + name: 'supabase_execute_sql', + description: + 'Executes raw SQL in the Postgres database. Use `apply_migration` instead for DDL operations.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + query: z.string().describe('The SQL query to execute'), + }) + ), + annotations: { category: 'SUPABASE_DATABASE' }, + }, + + // LOGS + { + name: 'supabase_get_logs', + description: + 'Gets logs for a Supabase project by service type. Use this to help debug problems with your app. This will only return logs within the last minute. If the logs you are looking for are older than 1 minute, re-run your test to reproduce them.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + service: z + .enum([ + 'api', + 'branch-action', + 'postgres', + 'edge-function', + 'auth', + 'storage', + 'realtime', + ]) + .describe('The service to fetch logs for'), + }) + ), + annotations: { category: 'SUPABASE_LOGS', readOnlyHint: true }, + }, + + // TYPES + { + name: 'supabase_generate_typescript_types', + description: 'Generates TypeScript types for a project.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + }) + ), + annotations: { category: 'SUPABASE_TYPES', readOnlyHint: true }, + }, + + // BRANCH + { + name: 'supabase_create_branch', + description: + 'Creates a development branch on a Supabase project. This will apply all migrations from the main project to a fresh branch database. Note that production data will not carry over. The branch will get its own project_id via the resulting project_ref. Use this ID to execute queries and migrations on the branch.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + name: z.string().default('develop').describe('Name of the branch to create'), + confirm_cost_id: z + .string({ + required_error: + 'User must confirm understanding of costs before creating a branch.', + }) + .describe('The cost confirmation ID. Call `confirm_cost` first.'), + }) + ), + annotations: { category: 'SUPABASE_BRANCH' }, + }, + { + name: 'supabase_list_branches', + description: + 'Lists all development branches of a Supabase project. This will return branch details including status which you can use to check when operations like merge/rebase/reset complete.', + inputSchema: zodToJsonSchema( + z.object({ + project_id: z.string(), + }) + ), + annotations: { category: 'SUPABASE_BRANCH', readOnlyHint: true }, + }, + { + name: 'supabase_delete_branch', + description: 'Deletes a development branch.', + inputSchema: zodToJsonSchema( + z.object({ + branch_id: z.string(), + }) + ), + annotations: { category: 'SUPABASE_BRANCH' }, + }, + { + name: 'supabase_merge_branch', + description: 'Merges migrations and edge functions from a development branch to production.', + inputSchema: zodToJsonSchema( + z.object({ + branch_id: z.string(), + }) + ), + annotations: { category: 'SUPABASE_BRANCH' }, + }, + { + name: 'supabase_reset_branch', + description: + 'Resets migrations of a development branch. Any untracked data or schema changes will be lost.', + inputSchema: zodToJsonSchema( + z.object({ + branch_id: z.string(), + migration_version: z + .string() + .optional() + .describe('Reset your development branch to a specific migration version.'), + }) + ), + annotations: { category: 'SUPABASE_BRANCH' }, + }, + { + name: 'supabase_rebase_branch', + description: + 'Rebases a development branch on production. This will effectively run any newer migrations from production onto this branch to help handle migration drift.', + inputSchema: zodToJsonSchema( + z.object({ + branch_id: z.string(), + }) + ), + annotations: { category: 'SUPABASE_BRANCH' }, + }, + ], + })); + return server; } diff --git a/mcp_servers/supabase/src/sse.ts b/mcp_servers/supabase/src/sse.ts deleted file mode 100644 index e27696d5..00000000 --- a/mcp_servers/supabase/src/sse.ts +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env node - -import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js'; -import { createSupabaseMcpServer, setManagementApiClient, asyncLocalStorage } from './server.js'; -import express from 'express'; -import * as dotenv from 'dotenv'; - -// Load environment variables -dotenv.config(); - -const server = createSupabaseMcpServer({ - platform: {}, - readOnly: true, -}); - -const app = express(); - -// to support multiple simultaneous connections we have a lookup object from -// sessionId to transport -const transports: { [sessionId: string]: SSEServerTransport } = {}; - -app.get("/sse", async (req, res) => { - const transport = new SSEServerTransport('/messages', res); - transports[transport.sessionId] = transport; - res.on("close", () => { - delete transports[transport.sessionId]; - }); - await server.connect(transport); -}); - -app.post("/messages", async (req, res) => { - const sessionId = req.query.sessionId as string; - const transport = transports[sessionId]; - if (transport) { - // Use environment variable for auth token if set, otherwise use header - const envAuthToken = process.env.SUPABASE_AUTH_TOKEN; - const accessToken = envAuthToken || req.headers['x-auth-token'] as string; - asyncLocalStorage.run({ managementApiClient: setManagementApiClient(accessToken) }, async () => { - await transport.handlePostMessage(req, res); - }); - } else { - res.status(400).send('No transport found for sessionId'); - } -}); - -const PORT = 5000; -app.listen(PORT, () => { - console.log(`Supabase MCP Server running on port ${PORT}`); -}); diff --git a/mcp_servers/supabase/test/llm.e2e.ts b/mcp_servers/supabase/test/llm.e2e.ts index 979bb0f3..62b12b27 100644 --- a/mcp_servers/supabase/test/llm.e2e.ts +++ b/mcp_servers/supabase/test/llm.e2e.ts @@ -108,10 +108,10 @@ describe('llm tests', () => { expect(toolCalls).toHaveLength(2); expect(toolCalls[0]).toEqual( - expect.objectContaining({ toolName: 'list_projects' }) + expect.objectContaining({ toolName: 'supabase_list_projects' }) ); expect(toolCalls[1]).toEqual( - expect.objectContaining({ toolName: 'list_tables' }) + expect.objectContaining({ toolName: 'supabase_list_tables' }) ); await expect(text).toMatchCriteria( diff --git a/mcp_servers/supabase/tsup.config.ts b/mcp_servers/supabase/tsup.config.ts index f932c5e3..3ebd5da4 100644 --- a/mcp_servers/supabase/tsup.config.ts +++ b/mcp_servers/supabase/tsup.config.ts @@ -2,7 +2,7 @@ import { defineConfig } from 'tsup'; export default defineConfig([ { - entry: ['src/index.ts', 'src/sse.ts'], + entry: ['src/mcp.ts'], format: ['cjs', 'esm'], outDir: 'dist', sourcemap: true, diff --git a/mcp_servers/tavily/.env.example b/mcp_servers/tavily/.env.example new file mode 100644 index 00000000..521ca721 --- /dev/null +++ b/mcp_servers/tavily/.env.example @@ -0,0 +1,2 @@ +TAVILY_API_KEY="your_api_key" # API key for Tavily +TAVILY_MCP_SERVER_PORT=5000 # Port for the MCP server to listen on diff --git a/mcp_servers/tavily/Dockerfile b/mcp_servers/tavily/Dockerfile new file mode 100644 index 00000000..bf9b067a --- /dev/null +++ b/mcp_servers/tavily/Dockerfile @@ -0,0 +1,22 @@ +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy only the requirements first to leverage Docker cache +COPY /mcp_servers/tavily/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the server code +COPY /mcp_servers/tavily/server.py . +COPY /mcp_servers/tavily/tools/ ./tools/ + +# Expose the port the server runs on +EXPOSE 5000 + +# Command to run the MCP server +CMD ["python", "server.py"] diff --git a/mcp_servers/tavily/README.md b/mcp_servers/tavily/README.md new file mode 100644 index 00000000..dd71dd32 --- /dev/null +++ b/mcp_servers/tavily/README.md @@ -0,0 +1,73 @@ +# Tavily MCP Server + +A Model Context Protocol (MCP) server for Tavily AI Search integration. Perform intelligent web searches with AI-powered result summarization and analysis. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to Tavily with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("TAVILY", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/tavily-mcp-server:latest + + +# Run Tavily MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/tavily-mcp-server:latest +``` + +**API Key Setup:** Get your Tavily API key from the [Tavily Dashboard](https://tavily.com/). + +## šŸ› ļø Available Tools + +- **AI Search**: Intelligent web search with AI-powered analysis +- **Result Summarization**: Get concise summaries of search results +- **Source Verification**: Verify information with credible sources +- **Context Extraction**: Extract relevant context from web content +- **Real-time Research**: Access up-to-date information from the web + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/tavily/requirements.txt b/mcp_servers/tavily/requirements.txt new file mode 100644 index 00000000..39b85b90 --- /dev/null +++ b/mcp_servers/tavily/requirements.txt @@ -0,0 +1,9 @@ +tavily-python +python-dotenv +pydantic +mcp[cli]==1.11.0 +starlette +uvicorn[standard] +click +typing-extensions +httpx \ No newline at end of file diff --git a/mcp_servers/tavily/server.py b/mcp_servers/tavily/server.py new file mode 100644 index 00000000..e8374576 --- /dev/null +++ b/mcp_servers/tavily/server.py @@ -0,0 +1,351 @@ +import os +import json +import logging +import contextlib +import base64 +from collections.abc import AsyncIterator +from typing import Any, Dict + +import click +from dotenv import load_dotenv +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +from tools import ( + tavily_api_key_context, + tavily_search, + tavily_extract, + tavily_crawl, + tavily_map, +) + +# Load env early +load_dotenv() + +logger = logging.getLogger("tavily-mcp-server") +logging.basicConfig(level=logging.INFO) + +TAVILY_MCP_SERVER_PORT = int(os.getenv("TAVILY_MCP_SERVER_PORT", "5000")) + +def extract_api_key(request_or_scope) -> str: + """Extract API key from headers or environment.""" + api_key = os.getenv("API_KEY") + auth_data = None + + if not api_key: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + header_value = request_or_scope.headers.get(b'x-auth-data') + if header_value: + auth_data = base64.b64decode(header_value).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + header_value = headers.get(b'x-auth-data') + if header_value: + auth_data = base64.b64decode(header_value).decode('utf-8') + + if auth_data: + try: + # Parse the JSON auth data to extract token + auth_json = json.loads(auth_data) + api_key = auth_json.get('token') or auth_json.get('api_key') or '' + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + api_key = "" + + return api_key or "" + +@click.command() +@click.option("--port", default=TAVILY_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option("--log-level", default="INFO", help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)") +@click.option("--json-response", is_flag=True, default=True, help="Enable JSON responses for StreamableHTTP") +def main(port: int, log_level: str, json_response: bool) -> int: + """Tavily MCP server with SSE + StreamableHTTP transports (LinkedIn-style).""" + logging.getLogger().setLevel(getattr(logging, log_level.upper(), logging.INFO)) + app = Server("tavily-mcp-server") + + # ----------------------------- Tool Registry -----------------------------# + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="tavily_search", + description=( + "Web Search (Tavily) — search the web with filters like topic/news, " + "date range, domains, and optional images/raw content.\n" + "Prompt hints: 'search web for', 'find recent articles about', 'get latest news on'" + ), + inputSchema={ + "type": "object", + "required": ["query"], + "properties": { + "query": {"type": "string", "description": "Search query text."}, + "search_depth": { + "type": "string", + "enum": ["basic", "advanced"], + "description": "Depth of search. 'basic' is faster; 'advanced' is deeper.", + "default": "basic", + }, + "topic": { + "type": "string", + "enum": ["general", "news"], + "description": "Bias results to a topic area.", + "default": "general", + }, + "days": {"type": "integer", "description": "Look-back window in days for freshness.", "default": 3}, + "time_range": { + "type": "string", + "enum": ["day", "week", "month", "year"], + "description": "Relative time window.", + }, + "start_date": {"type": "string", "description": "Filter results after YYYY-MM-DD."}, + "end_date": {"type": "string", "description": "Filter results before YYYY-MM-DD."}, + "max_results": { + "type": "integer", + "minimum": 5, + "maximum": 20, + "description": "Number of results to return (5–20).", + "default": 10, + }, + "include_images": {"type": "boolean", "description": "Include relevant images.", "default": False}, + "include_image_descriptions": { + "type": "boolean", + "description": "Include descriptions for images.", + "default": False, + }, + "include_raw_content": { + "type": "boolean", + "description": "Include cleaned raw page content.", + "default": False, + }, + "include_domains": { + "type": "array", + "items": {"type": "string"}, + "description": "Only include results from these domains.", + }, + "exclude_domains": { + "type": "array", + "items": {"type": "string"}, + "description": "Exclude results from these domains.", + }, + "country": {"type": "string", "description": "Country code to bias results (e.g., 'us')."}, + "include_favicon": {"type": "boolean", "description": "Include site favicon URLs.", "default": False}, + # Accept common aliases too (LLM-friendly) + "searchDepth": {"type": "string", "enum": ["basic", "advanced"]}, + "timeRange": {"type": "string", "enum": ["day", "week", "month", "year"]}, + "startDate": {"type": "string"}, + "endDate": {"type": "string"}, + "maxResults": {"type": "integer", "minimum": 5, "maximum": 20}, + "includeImages": {"type": "boolean"}, + "includeImageDescriptions": {"type": "boolean"}, + "includeRawContent": {"type": "boolean"}, + "includeDomains": {"type": "array", "items": {"type": "string"}}, + "excludeDomains": {"type": "array", "items": {"type": "string"}}, + "includeFavicon": {"type": "boolean"}, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "TAVILY_WEB_SEARCH", "readOnlyHint": True} + ), + ), + types.Tool( + name="tavily_extract", + description=( + "Extract Web Content — fetch and parse content from one or more URLs. " + "Supports 'basic'/'advanced' depth, markdown/text output, images, and favicons.\n" + "Prompt hints: 'extract content', 'scrape web page', 'fetch page summary'" + ), + inputSchema={ + "type": "object", + "required": ["urls"], + "properties": { + "urls": {"type": "array", "items": {"type": "string"}, "description": "List of absolute URLs."}, + "extract_depth": { + "type": "string", + "enum": ["basic", "advanced"], + "description": "Richer parsing with 'advanced'.", + "default": "basic", + }, + "include_images": {"type": "boolean", "default": False}, + "format": {"type": "string", "enum": ["markdown", "text"], "default": "markdown"}, + "include_favicon": {"type": "boolean", "default": False}, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "TAVILY_WEB_SEARCH", "readOnlyHint": True} + ), + ), + types.Tool( + name="tavily_crawl", + description=( + "Crawl Website — start from a root URL and follow links with depth/breadth controls. " + "Returns page content and metadata. Use for small, bounded explorations.\n" + "Prompt hints: 'crawl site', 'explore site structure', 'get all pages under'" + ), + inputSchema={ + "type": "object", + "required": ["url"], + "properties": { + "url": {"type": "string", "description": "Root absolute URL to crawl."}, + "max_depth": {"type": "integer", "minimum": 1, "default": 1}, + "max_breadth": {"type": "integer", "minimum": 1, "default": 20}, + "limit": {"type": "integer", "minimum": 1, "default": 50}, + "instructions": {"type": "string"}, + "select_paths": {"type": "array", "items": {"type": "string"}}, + "select_domains": {"type": "array", "items": {"type": "string"}}, + "allow_external": {"type": "boolean", "default": False}, + "categories": {"type": "array", "items": {"type": "string"}}, + "extract_depth": {"type": "string", "enum": ["basic", "advanced"], "default": "basic"}, + "format": {"type": "string", "enum": ["markdown", "text"], "default": "markdown"}, + "include_favicon": {"type": "boolean", "default": False}, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "TAVILY_WEB_SEARCH", "readOnlyHint": True} + ), + ), + types.Tool( + name="tavily_map", + description=( + "Generate Website Map — discover reachable URLs from a root without extracting heavy content. " + "Great for structure lists.\n" + "Prompt hints: 'map website', 'list all pages on', 'show site structure for'" + ), + inputSchema={ + "type": "object", + "required": ["url"], + "properties": { + "url": {"type": "string", "description": "Root absolute URL to map."}, + "max_depth": {"type": "integer", "minimum": 1, "default": 1}, + "max_breadth": {"type": "integer", "minimum": 1, "default": 20}, + "limit": {"type": "integer", "minimum": 1, "default": 50}, + "instructions": {"type": "string"}, + "select_paths": {"type": "array", "items": {"type": "string"}}, + "select_domains": {"type": "array", "items": {"type": "string"}}, + "allow_external": {"type": "boolean", "default": False}, + "categories": {"type": "array", "items": {"type": "string"}}, + }, + }, + annotations=types.ToolAnnotations( + **{"category": "TAVILY_WEB_SEARCH", "readOnlyHint": True} + ), + ), + ] + + # ---------------------------- Tool Dispatcher ----------------------------# + @app.call_tool() + async def call_tool(name: str, arguments: Dict[str, Any]) -> list[types.TextContent]: + logger.info(f"call_tool: {name}") + logger.debug(f"raw arguments: {json.dumps(arguments, indent=2)}") + + try: + if name == "tavily_search": + result = tavily_search(arguments) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + if name == "tavily_extract": + result = tavily_extract(arguments) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + if name == "tavily_crawl": + result = tavily_crawl(arguments) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + if name == "tavily_map": + result = tavily_map(arguments) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + return [types.TextContent(type="text", text=f"Unknown tool: {name}")] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + # ------------------------------- Transports ------------------------------# + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + """ + SSE transport endpoint. + If header 'x-auth-token' is present, bind it for the request via ContextVar. + """ + logger.info("Handling SSE connection") + + # Extract API key from headers + api_key = extract_api_key(request) + + token = None + if api_key: + token = tavily_api_key_context.set(api_key) + + try: + async with sse.connect_sse(request.scope, request.receive, request._send) as streams: + await app.run(streams[0], streams[1], app.create_initialization_options()) + finally: + if token: + tavily_api_key_context.reset(token) + return Response() + + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http(scope: Scope, receive: Receive, send: Send) -> None: + """ + Streamable HTTP transport endpoint. + Accepts 'x-auth-token' header for per-request auth. + """ + logger.info("Handling StreamableHTTP request") + + # Extract API key from headers + api_key = extract_api_key(scope) + + token = None + if api_key: + token = tavily_api_key_context.set(api_key) + try: + await session_manager.handle_request(scope, receive, send) + finally: + if token: + tavily_api_key_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(_app: Starlette) -> AsyncIterator[None]: + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + starlette_app = Starlette( + debug=True, + routes=[ + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + return 0 + + +if __name__ == "__main__": + main() diff --git a/mcp_servers/tavily/tools/__init__.py b/mcp_servers/tavily/tools/__init__.py new file mode 100644 index 00000000..1aa5a1ca --- /dev/null +++ b/mcp_servers/tavily/tools/__init__.py @@ -0,0 +1,17 @@ +from .auth import tavily_api_key_context, get_tavily_client +from .search import tavily_search +from .extract import tavily_extract +from .crawl import tavily_crawl +from .map import tavily_map + +__all__ = [ + # Auth/context + "tavily_api_key_context", + "get_tavily_client", + + # Tools + "tavily_search", + "tavily_extract", + "tavily_crawl", + "tavily_map", +] diff --git a/mcp_servers/tavily/tools/auth.py b/mcp_servers/tavily/tools/auth.py new file mode 100644 index 00000000..18d0a66b --- /dev/null +++ b/mcp_servers/tavily/tools/auth.py @@ -0,0 +1,37 @@ +import os +import logging +from contextvars import ContextVar + +from dotenv import load_dotenv +from tavily import TavilyClient + +logger = logging.getLogger(__name__) + +# Load environment variables from .env if present +load_dotenv() + +tavily_api_key_context: ContextVar[str] = ContextVar("tavily_api_key") + + +def _get_env_api_key() -> str: + key = os.getenv("TAVILY_API_KEY") + if not key: + raise RuntimeError( + "Tavily API key not found. Set TAVILY_API_KEY env var " + "or provide 'x-auth-token' header to the server." + ) + return key + + +def get_tavily_api_key() -> str: + """Get the Tavily API key for the current request (ContextVar → env).""" + try: + return tavily_api_key_context.get() + except LookupError: + return _get_env_api_key() + + +def get_tavily_client() -> TavilyClient: + """Create a TavilyClient bound to the current request's API key.""" + api_key = get_tavily_api_key() + return TavilyClient(api_key=api_key) diff --git a/mcp_servers/tavily/tools/crawl.py b/mcp_servers/tavily/tools/crawl.py new file mode 100644 index 00000000..3e5d9462 --- /dev/null +++ b/mcp_servers/tavily/tools/crawl.py @@ -0,0 +1,62 @@ +import logging +from typing import Any, Dict, List + +from .auth import get_tavily_client + +logger = logging.getLogger(__name__) + + +def tavily_crawl(arguments: Dict[str, Any]) -> Dict[str, Any]: + """ + Crawl Website — start from a root URL and follow links with depth/breadth controls. + Expected args include: url (required), max_depth, max_breadth, limit, + instructions, select_paths, select_domains, allow_external, categories, + extract_depth, format, include_favicon. + """ + client = get_tavily_client() + + url = arguments.get("url") + if not url: + raise RuntimeError("Parameter 'url' is required for tavily_crawl") + + params: Dict[str, Any] = { + "url": url, + "max_depth": arguments.get("max_depth", 1), + "max_breadth": arguments.get("max_breadth", 20), + "limit": arguments.get("limit", 50), + "instructions": arguments.get("instructions"), + "select_paths": arguments.get("select_paths") or [], + "select_domains": arguments.get("select_domains") or [], + "allow_external": arguments.get("allow_external", False), + "categories": arguments.get("categories") or [], + "extract_depth": arguments.get("extract_depth", "basic"), + "format": arguments.get("format", "markdown"), + "include_favicon": arguments.get("include_favicon", False), + } + + def _b(v: Any) -> Any: + if isinstance(v, bool): + return v + if isinstance(v, str): + s = v.strip().lower() + if s in {"true", "1", "yes", "y"}: + return True + if s in {"false", "0", "no", "n"}: + return False + return v + + def _i(v: Any) -> Any: + if isinstance(v, int): + return v + if isinstance(v, str) and v.strip().isdigit(): + return int(v) + return v + + for k in ("max_depth", "max_breadth", "limit"): + params[k] = _i(params[k]) + + for k in ("allow_external", "include_favicon"): + params[k] = _b(params[k]) + + logger.info(f"tavily_crawl: {url}") + return client.crawl(**{k: v for k, v in params.items() if v is not None}) diff --git a/mcp_servers/tavily/tools/extract.py b/mcp_servers/tavily/tools/extract.py new file mode 100644 index 00000000..fb564adc --- /dev/null +++ b/mcp_servers/tavily/tools/extract.py @@ -0,0 +1,44 @@ +import logging +from typing import Any, Dict, List + +from .auth import get_tavily_client + +logger = logging.getLogger(__name__) + + +def tavily_extract(arguments: Dict[str, Any]) -> Dict[str, Any]: + """ + Extract Web Content — fetch and parse content from one or more URLs. + Expected args: urls (List[str]), extract_depth, include_images, format, include_favicon + """ + client = get_tavily_client() + + urls = arguments.get("urls") + if not urls or not isinstance(urls, list): + raise RuntimeError("Parameter 'urls' (list) is required for tavily_extract") + + params: Dict[str, Any] = { + "urls": urls, + "extract_depth": arguments.get("extract_depth", "basic"), + "include_images": arguments.get("include_images", False), + "format": arguments.get("format", "markdown"), + "include_favicon": arguments.get("include_favicon", False), + } + + # Coerce booleans passed as strings + def _b(v: Any) -> Any: + if isinstance(v, bool): + return v + if isinstance(v, str): + s = v.strip().lower() + if s in {"true", "1", "yes", "y"}: + return True + if s in {"false", "0", "no", "n"}: + return False + return v + + params["include_images"] = _b(params["include_images"]) + params["include_favicon"] = _b(params["include_favicon"]) + + logger.info(f"tavily_extract: {len(urls)} url(https://github.com/s)") + return client.extract(**{k: v for k, v in params.items() if v is not None}) diff --git a/mcp_servers/tavily/tools/map.py b/mcp_servers/tavily/tools/map.py new file mode 100644 index 00000000..de4341bc --- /dev/null +++ b/mcp_servers/tavily/tools/map.py @@ -0,0 +1,58 @@ +import logging +from typing import Any, Dict + +from .auth import get_tavily_client + +logger = logging.getLogger(__name__) + + +def tavily_map(arguments: Dict[str, Any]) -> Dict[str, Any]: + """ + Generate Website Map — discover reachable URLs from a root without heavy extraction. + Expected args include: url (required), max_depth, max_breadth, limit, + instructions, select_paths, select_domains, allow_external, categories. + """ + client = get_tavily_client() + + url = arguments.get("url") + if not url: + raise RuntimeError("Parameter 'url' is required for tavily_map") + + params: Dict[str, Any] = { + "url": url, + "max_depth": arguments.get("max_depth", 1), + "max_breadth": arguments.get("max_breadth", 20), + "limit": arguments.get("limit", 50), + "instructions": arguments.get("instructions"), + "select_paths": arguments.get("select_paths") or [], + "select_domains": arguments.get("select_domains") or [], + "allow_external": arguments.get("allow_external", False), + "categories": arguments.get("categories") or [], + } + + # Coercions + def _b(v: Any) -> Any: + if isinstance(v, bool): + return v + if isinstance(v, str): + s = v.strip().lower() + if s in {"true", "1", "yes", "y"}: + return True + if s in {"false", "0", "no", "n"}: + return False + return v + + def _i(v: Any) -> Any: + if isinstance(v, int): + return v + if isinstance(v, str) and v.strip().isdigit(): + return int(v) + return v + + for k in ("max_depth", "max_breadth", "limit"): + params[k] = _i(params[k]) + + params["allow_external"] = _b(params["allow_external"]) + + logger.info(f"tavily_map: {url}") + return client.map(**{k: v for k, v in params.items() if v is not None}) diff --git a/mcp_servers/tavily/tools/search.py b/mcp_servers/tavily/tools/search.py new file mode 100644 index 00000000..f0f74520 --- /dev/null +++ b/mcp_servers/tavily/tools/search.py @@ -0,0 +1,95 @@ +import logging +from typing import Any, Dict + +from .auth import get_tavily_client + +logger = logging.getLogger(__name__) + +_ALLOWED_KEYS = { + "query", + "search_depth", + "topic", + "days", + "time_range", + "start_date", + "end_date", + "max_results", + "include_images", + "include_image_descriptions", + "include_raw_content", + "include_domains", + "exclude_domains", + "country", + "include_favicon", +} + +_ALIASES = { + "searchDepth": "search_depth", + "timeRange": "time_range", + "startDate": "start_date", + "endDate": "end_date", + "maxResults": "max_results", + "includeImages": "include_images", + "includeImageDescriptions": "include_image_descriptions", + "includeRawContent": "include_raw_content", + "includeDomains": "include_domains", + "excludeDomains": "exclude_domains", + "includeFavicon": "include_favicon", +} + + +def _coerce_bool(v: Any) -> Any: + if isinstance(v, bool): + return v + if isinstance(v, str): + s = v.strip().lower() + if s in {"true", "1", "yes", "y"}: + return True + if s in {"false", "0", "no", "n"}: + return False + return v + + +def _coerce_int(v: Any) -> Any: + if isinstance(v, int): + return v + if isinstance(v, str): + s = v.strip() + if s.isdigit(): + return int(s) + return v + + +def _normalize_args(arguments: Dict[str, Any]) -> Dict[str, Any]: + # apply aliases + norm: Dict[str, Any] = {} + for k, v in arguments.items(): + key = _ALIASES.get(k, k) + norm[key] = v + + # keep only allowed keys + norm = {k: v for k, v in norm.items() if k in _ALLOWED_KEYS} + + # basic coercions + if "max_results" in norm: + norm["max_results"] = _coerce_int(norm["max_results"]) + + for b in ("include_images", "include_image_descriptions", "include_raw_content", "include_favicon"): + if b in norm: + norm[b] = _coerce_bool(norm[b]) + + return norm + + +def tavily_search(arguments: Dict[str, Any]) -> Dict[str, Any]: + """ + Web Search (Tavily). + Arguments: see server list_tools schema; this function expects a plain dict. + Returns a dict as provided by Tavily's client. + """ + client = get_tavily_client() + params = _normalize_args(arguments) + if "query" not in params or not params["query"]: + raise RuntimeError("Parameter 'query' is required for tavily_search") + logger.info(f"tavily_search: {params.get('query')}") + return client.search(**params) diff --git a/mcp_servers/trello/.env.example b/mcp_servers/trello/.env.example new file mode 100644 index 00000000..68869767 --- /dev/null +++ b/mcp_servers/trello/.env.example @@ -0,0 +1,2 @@ +TRELLO_API_KEY= +TRELLO_API_TOKEN= diff --git a/mcp_servers/trello/Dockerfile b/mcp_servers/trello/Dockerfile new file mode 100644 index 00000000..3e5907ea --- /dev/null +++ b/mcp_servers/trello/Dockerfile @@ -0,0 +1,13 @@ +FROM python:3.11-slim + +WORKDIR /app + +COPY requirements.txt . + +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +EXPOSE 5002 + +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/trello/README.md b/mcp_servers/trello/README.md new file mode 100644 index 00000000..ed7c9d5e --- /dev/null +++ b/mcp_servers/trello/README.md @@ -0,0 +1,72 @@ +# Trello MCP Server + +A Model Context Protocol (MCP) server for Trello integration. Manage boards, lists, cards, and checklists using Trello's API. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Build the Docker image +docker build -t trello-mcp-server . + +# Run the server with your credentials +docker run -p 5002:5002 -e TRELLO_API_KEY="your_key" -e TRELLO_API_TOKEN="your_token" trello-mcp-server +``` + +### šŸ’» Local Development + +```bash +# Navigate to the server directory +cd mcp_servers/trello + +# Create and activate virtual environment +uv venv +.venv\Scripts\activate + +# Install dependencies +uv pip install -r requirements.txt + +# Create a .env file with your Trello credentials +# TRELLO_API_KEY=your_key +# TRELLO_API_TOKEN=your_token + +# Run the server +python server.py +``` + +## šŸ› ļø Available Tools + +### Board Operations +- `trello_get_my_boards`: Fetches all boards that the user is a member of. +- `trello_create_board`: Creates a new board. + +### List Operations +- `trello_get_board_lists`: Fetches all lists in a specific board. + +### Card Operations +- `trello_get_list_cards`: Fetches all cards in a specific list. +- `trello_create_card`: Creates a new card in a specific list. +- `trello_update_card`: Updates an existing card. +- `trello_delete_card`: Deletes a card. + +### Checklist Operations +- `trello_create_checklist`: Creates a new checklist on a specific card. +- `trello_add_item_to_checklist`: Adds an item to a checklist. +- `trello_update_checklist_item_state`: Updates an item's state in a checklist (e.g., to 'complete'). + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. diff --git a/mcp_servers/trello/requirements.txt b/mcp_servers/trello/requirements.txt new file mode 100644 index 00000000..5030aa1b --- /dev/null +++ b/mcp_servers/trello/requirements.txt @@ -0,0 +1,8 @@ +mcp>=1.12.0 +pydantic +typing-extensions +httpx +click +python-dotenv +starlette +uvicorn[standard] diff --git a/mcp_servers/trello/server.py b/mcp_servers/trello/server.py new file mode 100644 index 00000000..b3dc5309 --- /dev/null +++ b/mcp_servers/trello/server.py @@ -0,0 +1,359 @@ +import contextlib +import json +import logging +import os +import asyncio +from collections.abc import AsyncIterator +from typing import Callable, Awaitable, Any + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.stdio import stdio_server +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.routing import Mount +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools.base import init_http_clients, close_http_clients +from tools import ( + get_my_boards, + create_board, + get_board_lists, + get_list_cards, + create_card, + update_card, + delete_card, + create_checklist, + add_item_to_checklist, + update_checklist_item_state, +) + +load_dotenv() + +# Configure logging +logger = logging.getLogger(__name__) + +TRELLO_MCP_SERVER_PORT = int(os.getenv("TRELLO_MCP_SERVER_PORT", "5002")) + +def get_all_tools() -> list[types.Tool]: + """Returns a list of all tool definitions.""" + return [ + types.Tool( + name="trello_get_my_boards", + description="Fetches all boards that the user is a member of.", + inputSchema={"type": "object", "properties": {}} + ), + types.Tool( + name="trello_create_board", + description="Creates a new board in Trello.", + inputSchema={ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the board." + }, + "desc": { + "type": "string", + "description": "The description of the board." + } + }, + "required": ["name"] + } + ), + types.Tool( + name="trello_get_board_lists", + description="Fetches all lists in a specific board.", + inputSchema={ + "type": "object", + "properties": { + "board_id": { + "type": "string", + "description": "The ID of the board to get the lists from." + } + }, + "required": ["board_id"] + } + ), + types.Tool( + name="trello_get_list_cards", + description="Fetches all cards in a specific list.", + inputSchema={ + "type": "object", + "properties": { + "list_id": { + "type": "string", + "description": "The ID of the list to get the cards from." + } + }, + "required": ["list_id"] + } + ), + types.Tool( + name="trello_create_card", + description="Creates a new card in a specific list.", + inputSchema={ + "type": "object", + "properties": { + "idList": { + "type": "string", + "description": "The ID of the list to create the card in." + }, + "name": { + "type": "string", + "description": "The name of the card." + }, + "desc": { + "type": "string", + "description": "The description of the card." + } + }, + "required": ["idList", "name"] + } + ), + types.Tool( + name="trello_update_card", + description="Updates a card in Trello.", + inputSchema={ + "type": "object", + "properties": { + "card_id": { + "type": "string", + "description": "The ID of the card to update." + }, + "name": { + "type": "string", + "description": "The new name of the card." + }, + "desc": { + "type": "string", + "description": "The new description of the card." + }, + "idList": { + "type": "string", + "description": "The new list ID for the card." + } + }, + "required": ["card_id"] + } + ), + types.Tool( + name="trello_delete_card", + description="Deletes a card in Trello.", + inputSchema={ + "type": "object", + "properties": { + "card_id": { + "type": "string", + "description": "The ID of the card to delete." + } + }, + "required": ["card_id"] + } + ), + types.Tool( + name="trello_create_checklist", + description="Creates a new checklist on a specific card.", + inputSchema={ + "type": "object", + "properties": { + "idCard": { + "type": "string", + "description": "The ID of the card to add the checklist to." + }, + "name": { + "type": "string", + "description": "The name of the checklist." + } + }, + "required": ["idCard", "name"] + } + ), + types.Tool( + name="trello_add_item_to_checklist", + description="Adds a new item to a specific checklist.", + inputSchema={ + "type": "object", + "properties": { + "idChecklist": { + "type": "string", + "description": "The ID of the checklist to add the item to." + }, + "name": { + "type": "string", + "description": "The name of the checklist item." + }, + "checked": { + "type": "boolean", + "description": "Whether the item should be checked (default: false)." + } + }, + "required": ["idChecklist", "name"] + } + ), + types.Tool( + name="trello_update_checklist_item_state", + description="Updates the state of an item on a checklist (e.g., 'complete' or 'incomplete').", + inputSchema={ + "type": "object", + "properties": { + "idCard": { + "type": "string", + "description": "The ID of the card containing the checklist item." + }, + "idCheckItem": { + "type": "string", + "description": "The ID of the checklist item to update." + }, + "state": { + "type": "string", + "description": "The new state of the item. Must be 'complete' or 'incomplete'.", + "enum": ["complete", "incomplete"] + } + }, + "required": ["idCard", "idCheckItem", "state"] + } + ), + ] + +async def call_tool_router(name: str, arguments: dict) -> Any: + """Unified router for all tool calls.""" + logger.info(f"Calling tool: {name} with arguments: {arguments}") + + tool_map: dict[str, Callable[..., Awaitable[Any]]] = { + "trello_get_my_boards": get_my_boards, + "trello_create_board": create_board, + "trello_get_board_lists": get_board_lists, + "trello_get_list_cards": get_list_cards, + "trello_create_card": create_card, + "trello_update_card": update_card, + "trello_delete_card": delete_card, + "trello_create_checklist": create_checklist, + "trello_add_item_to_checklist": add_item_to_checklist, + "trello_update_checklist_item_state": update_checklist_item_state, + } + + tool_func = tool_map.get(name) + if not tool_func: + raise ValueError(f"Unknown tool: {name}") + + return await tool_func(**arguments) + +@click.command() +@click.option("--port", default=TRELLO_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +@click.option( + "--stdio", + is_flag=True, + default=False, + help="Run in stdio mode for Claude Desktop (instead of HTTP server mode)", +) +def main( + port: int, + log_level: str, + json_response: bool, + stdio: bool, +) -> int: + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + if stdio: + return run_stdio_mode() + else: + return run_http_mode(port, json_response) + +def run_stdio_mode() -> int: + """Run the MCP server in stdio mode.""" + logger.info("Trello MCP Server initializing in stdio mode...") + app = Server("trello-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return get_all_tools() + + @app.call_tool() + async def call_tool(name: str, arguments: dict) -> list[types.TextContent]: + try: + await init_http_clients() + result = await call_tool_router(name, arguments) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.error(f"Error calling tool {name}: {e}") + error_response = {"error": str(e), "tool": name} + return [types.TextContent(type="text", text=json.dumps(error_response, indent=2))] + finally: + await close_http_clients() + + async def run_server(): + logger.info("Starting stdio server...") + async with stdio_server() as (read_stream, write_stream): + await app.run(read_stream, write_stream, app.create_initialization_options()) + + asyncio.run(run_server()) + return 0 + +def run_http_mode(port: int, json_response: bool) -> int: + """Run the MCP server in HTTP mode.""" + app = Server("trello-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return get_all_tools() + + @app.call_tool() + async def call_tool(name: str, arguments: dict) -> list[types.TextContent]: + try: + result = await call_tool_router(name, arguments) + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http(scope: Scope, receive: Receive, send: Send) -> None: + await session_manager.handle_request(scope, receive, send) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + await init_http_clients() + async with session_manager.run(): + logger.info("Trello MCP Server started in HTTP mode!") + try: + yield + finally: + logger.info("Trello MCP Server shutting down...") + await close_http_clients() + + starlette_app = Starlette( + debug=True, + routes=[Mount("/mcp", app=handle_streamable_http)], + lifespan=lifespan, + ) + + logger.info(f"Server starting on http://0.0.0.0:{port}/mcp") + import uvicorn + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + return 0 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/trello/tools/__init__.py b/mcp_servers/trello/tools/__init__.py new file mode 100644 index 00000000..03daf905 --- /dev/null +++ b/mcp_servers/trello/tools/__init__.py @@ -0,0 +1,23 @@ +from .base import get_trello_client +from .boards import get_my_boards, create_board +from .lists import get_board_lists +from .cards import get_list_cards, create_card, update_card, delete_card +from .checklists import create_checklist, add_item_to_checklist, update_checklist_item_state + +__all__ = [ + "get_trello_client", + # Board tools + "get_my_boards", + "create_board", + # List tools + "get_board_lists", + # Card tools + "get_list_cards", + "create_card", + "update_card", + "delete_card", + # Checklist tools + "create_checklist", + "add_item_to_checklist", + "update_checklist_item_state", +] diff --git a/mcp_servers/trello/tools/base.py b/mcp_servers/trello/tools/base.py new file mode 100644 index 00000000..f48c6aee --- /dev/null +++ b/mcp_servers/trello/tools/base.py @@ -0,0 +1,39 @@ +import os +import httpx +from dotenv import load_dotenv + +load_dotenv() + +TRELLO_API_KEY = os.getenv("TRELLO_API_KEY") +TRELLO_API_TOKEN = os.getenv("TRELLO_API_TOKEN") +TRELLO_API_URL = "https://api.trello.com/1" + +class TrelloClient: + def __init__(self): + self.client = httpx.AsyncClient( + base_url=TRELLO_API_URL, + params={ + "key": TRELLO_API_KEY, + "token": TRELLO_API_TOKEN, + }, + ) + + async def close(self): + await self.client.aclose() + + async def make_request(self, method: str, endpoint: str, **kwargs): + response = await self.client.request(method, endpoint, **kwargs) + response.raise_for_status() + return response.json() + +http_clients: dict[str, TrelloClient] = {} + +async def init_http_clients(): + http_clients["trello"] = TrelloClient() + +async def close_http_clients(): + for client in http_clients.values(): + await client.close() + +def get_trello_client() -> TrelloClient: + return http_clients["trello"] diff --git a/mcp_servers/trello/tools/boards.py b/mcp_servers/trello/tools/boards.py new file mode 100644 index 00000000..a61b7efe --- /dev/null +++ b/mcp_servers/trello/tools/boards.py @@ -0,0 +1,15 @@ +from typing import Optional +from .base import get_trello_client + +async def get_my_boards(): + """Fetches all boards that the user is a member of.""" + client = get_trello_client() + return await client.make_request("GET", "/members/me/boards") + +async def create_board(name: str, desc: Optional[str] = None): + """Creates a new board in Trello.""" + client = get_trello_client() + json_data = {"name": name} + if desc: + json_data["desc"] = desc + return await client.make_request("POST", "/boards", json=json_data) diff --git a/mcp_servers/trello/tools/cards.py b/mcp_servers/trello/tools/cards.py new file mode 100644 index 00000000..c2bfa5db --- /dev/null +++ b/mcp_servers/trello/tools/cards.py @@ -0,0 +1,35 @@ +from typing import Optional +from .base import get_trello_client + +async def get_list_cards(list_id: str): + """Fetches all cards in a specific list.""" + client = get_trello_client() + return await client.make_request("GET", f"/lists/{list_id}/cards") + +async def create_card(idList: str, name: str, desc: Optional[str] = None): + """Creates a new card in a specific list.""" + client = get_trello_client() + json_data = { + "idList": idList, + "name": name, + } + if desc: + json_data["desc"] = desc + return await client.make_request("POST", "/cards", json=json_data) + +async def update_card(card_id: str, name: Optional[str] = None, desc: Optional[str] = None, idList: Optional[str] = None): + """Updates a card in Trello.""" + client = get_trello_client() + json_data = {} + if name: + json_data["name"] = name + if desc: + json_data["desc"] = desc + if idList: + json_data["idList"] = idList + return await client.make_request("PUT", f"/cards/{card_id}", json=json_data) + +async def delete_card(card_id: str): + """Deletes a card in Trello.""" + client = get_trello_client() + return await client.make_request("DELETE", f"/cards/{card_id}") diff --git a/mcp_servers/trello/tools/checklists.py b/mcp_servers/trello/tools/checklists.py new file mode 100644 index 00000000..933e4b18 --- /dev/null +++ b/mcp_servers/trello/tools/checklists.py @@ -0,0 +1,19 @@ +from .base import get_trello_client + +async def create_checklist(idCard: str, name: str): + """Creates a new checklist on a specific card.""" + client = get_trello_client() + return await client.make_request("POST", f"/cards/{idCard}/checklists", params={"name": name}) + +async def add_item_to_checklist(idChecklist: str, name: str, checked: bool = False): + """Adds a new item to a specific checklist.""" + client = get_trello_client() + params = {"name": name, "checked": checked} + return await client.make_request("POST", f"/checklists/{idChecklist}/checkItems", params=params) + +async def update_checklist_item_state(idCard: str, idCheckItem: str, state: str): + """Updates the state of an item on a checklist (e.g., 'complete' or 'incomplete').""" + client = get_trello_client() + # Note: The endpoint for updating a checkitem is unusual and requires the card ID. + # It's PUT /1/cards/{idCard}/checkItem/{idCheckItem} + return await client.make_request("PUT", f"/cards/{idCard}/checkItem/{idCheckItem}", params={"state": state}) diff --git a/mcp_servers/trello/tools/lists.py b/mcp_servers/trello/tools/lists.py new file mode 100644 index 00000000..09f81654 --- /dev/null +++ b/mcp_servers/trello/tools/lists.py @@ -0,0 +1,6 @@ +from .base import get_trello_client + +async def get_board_lists(board_id: str): + """Fetches all lists in a specific board.""" + client = get_trello_client() + return await client.make_request("GET", f"/boards/{board_id}/lists") diff --git a/mcp_servers/tsconfig.json b/mcp_servers/tsconfig.json new file mode 100644 index 00000000..a40134b4 --- /dev/null +++ b/mcp_servers/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "esModuleInterop": true, + "outDir": "./dist", + "rootDir": ".", + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "strict": true, + "declaration": true + }, + "include": [ + "./**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/mcp_servers/twilio/.env.example b/mcp_servers/twilio/.env.example new file mode 100644 index 00000000..09283dcf --- /dev/null +++ b/mcp_servers/twilio/.env.example @@ -0,0 +1,7 @@ +# Twilio API Credentials (Required) +# Get these from your Twilio Console: https://console.twilio.com +TWILIO_ACCOUNT_SID=your_account_sid_here +TWILIO_AUTH_TOKEN=your_auth_token_here + +# Server Configuration (Optional) +TWILIO_MCP_SERVER_PORT=5000 diff --git a/mcp_servers/twilio/Dockerfile b/mcp_servers/twilio/Dockerfile new file mode 100644 index 00000000..b50d2dcd --- /dev/null +++ b/mcp_servers/twilio/Dockerfile @@ -0,0 +1,36 @@ +FROM python:3.11-slim + +# Set working directory +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements first for better caching +COPY requirements.txt . + +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application code +COPY . . + +# Create a non-root user +RUN useradd --create-home --shell /bin/bash app +RUN chown -R app:app /app +USER app + +# Expose the default port +EXPOSE 5000 + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD curl -f http://localhost:5000/ || exit 1 + +# Set default environment variables +ENV TWILIO_MCP_SERVER_PORT=5000 + +# Run the server +CMD ["python", "server.py"] \ No newline at end of file diff --git a/mcp_servers/twilio/README.md b/mcp_servers/twilio/README.md new file mode 100644 index 00000000..95e287f6 --- /dev/null +++ b/mcp_servers/twilio/README.md @@ -0,0 +1,383 @@ +# Twilio MCP Server + +A comprehensive Model Context Protocol (MCP) server implementation that provides full integration with Twilio's communication APIs. This server enables AI agents to send SMS/MMS messages, make voice calls, manage phone numbers, and monitor account usage through a set of atomic, well-designed tools. + +## Features + +- **15 Comprehensive Tools**: Complete coverage of Twilio's communication APIs +- **SMS & MMS Messaging**: Send text messages and multimedia content +- **Voice Calls**: Initiate calls with TwiML control and call management +- **Phone Number Management**: Search, purchase, configure, and release phone numbers +- **Account Monitoring**: Check balances, usage records, and account information +- **Dual Mode Architecture**: Supports both Claude Desktop (stdio) and HTTP server modes +- **Secure Authentication**: Context-aware token management with environment variables +- **Detailed Logging**: Configurable logging with rich operational context +- **Error Handling**: Comprehensive error handling with actionable error messages + +## Tools Overview + +The server provides 15 atomic tools organized into four main categories: + +### Messaging Operations +- `twilio_send_sms`: Send SMS messages with delivery tracking +- `twilio_send_mms`: Send multimedia messages with up to 10 attachments +- `twilio_get_messages`: Retrieve message history with flexible filtering +- `twilio_get_message_by_sid`: Get detailed information about specific messages + +### Voice Operations +- `twilio_make_call`: Initiate phone calls with TwiML instructions +- `twilio_get_calls`: Retrieve call history with status filtering +- `twilio_get_call_by_sid`: Get detailed call information including duration and costs +- `twilio_get_recordings`: Access call recordings for analysis + +### Phone Number Management +- `twilio_search_available_numbers`: Find available numbers by area code or pattern +- `twilio_purchase_phone_number`: Purchase numbers with webhook configuration +- `twilio_list_phone_numbers`: View all owned phone numbers and their settings +- `twilio_update_phone_number`: Modify number configurations and webhooks +- `twilio_release_phone_number`: Release numbers to stop billing + +### Account & Usage Monitoring +- `twilio_get_account_info`: Retrieve account details and status +- `twilio_get_balance`: Check current account balance +- `twilio_get_usage_records`: Generate detailed usage reports by category and time period + +## Installation & Setup + +### Prerequisites + +1. **Twilio Account**: Sign up at [twilio.com](https://www.twilio.com) +2. **API Credentials**: Obtain your Account SID and Auth Token from the Twilio Console +3. **Python 3.8+**: Required for running the server +4. **Phone Number**: Purchase at least one Twilio phone number for sending messages/calls + +### Quick Setup Guide + +#### Step 1: Get Twilio Credentials +1. Sign up or log in to [Twilio Console](https://console.twilio.com) +2. Navigate to **Account Dashboard** +3. Copy your **Account SID** and **Auth Token** +4. (Optional) Purchase a phone number from **Phone Numbers > Manage > Buy a number** + +#### Step 2: Install Dependencies +```bash +# Navigate to the Twilio MCP server directory +cd mcp_servers/twilio + +# Create virtual environment (recommended) +python -m venv venv +source venv/bin/activate # On Windows: venv\Scripts\activate + +# Install dependencies +pip install -r requirements.txt +``` + +#### Step 3: Configure Environment Variables +1. **Copy the example environment file**: + ```bash + cp .env.example .env + ``` + +2. **Edit the `.env` file** with your Twilio credentials: + ```bash + # Open .env in your preferred editor + nano .env # or vim .env, or code .env + ``` + + Add your credentials: + ``` + TWILIO_ACCOUNT_SID=your_account_sid_here + TWILIO_AUTH_TOKEN=your_auth_token_here + TWILIO_MCP_SERVER_PORT=5000 + ``` + +## Running the Server + +The Twilio MCP server supports two modes: + +### Mode 1: Claude Desktop Integration (stdio) +For use with Claude Desktop or other MCP clients that use stdio transport: + +```bash +python server.py --stdio +``` + +**Claude Desktop Configuration:** +Add this to your `claude_desktop_config.json`: +```json +{ + "mcpServers": { + "twilio": { + "command": "/path/to/your/venv/bin/python", + "args": ["/path/to/mcp_servers/twilio/server.py", "--stdio"], + "env": { + "TWILIO_ACCOUNT_SID": "your_account_sid_here", + "TWILIO_AUTH_TOKEN": "your_auth_token_here" + } + } + } +} +``` + +### Mode 2: HTTP Server (default) +For API testing, web integration, or other MCP clients that use HTTP transport: + +```bash +python server.py +# Server runs on http://localhost:5000 +``` + +**Custom Configuration Options:** +```bash +# Custom port and logging level +python server.py --port 8080 --log-level DEBUG + +# Enable JSON responses instead of SSE streams +python server.py --json-response +``` + +### Docker Installation + +For containerized deployment: + +1. **Set up environment variables** (follow Step 3 above): + ```bash + cp .env.example .env + # Edit .env with your Twilio credentials + ``` + +2. **Build the Docker image**: + ```bash + docker build -t twilio-mcp-server . + ``` + +3. **Run in HTTP mode** (default): + ```bash + docker run -p 5000:5000 --env-file .env twilio-mcp-server + ``` + +4. **Run in stdio mode** (for MCP client integration): + ```bash + docker run --env-file .env twilio-mcp-server --stdio + ``` + +## Testing Your Setup + +### Quick Test Commands + +1. **Test HTTP mode** (server running on localhost:5000): + ```bash + # Health check + curl http://localhost:5000/ + + # List available tools + curl -X POST http://localhost:5000/ \ + -H "Content-Type: application/json" \ + -d '{"method": "tools/list"}' + + # Test account info (requires credentials in .env) + curl -X POST http://localhost:5000/ \ + -H "Content-Type: application/json" \ + -d '{"method": "tools/call", "params": {"name": "twilio_get_account_info", "arguments": {}}}' + ``` + +2. **Test Claude Desktop Integration**: + - Add the server to your Claude Desktop config + - Restart Claude Desktop + - Ask Claude: "Can you check my Twilio account balance?" + +## Usage Examples + +### Available Server Endpoints + +**HTTP Mode (default):** +- **Health Check**: `GET http://localhost:5000/` +- **SSE Streaming**: `GET http://localhost:5000/sse` +- **StreamableHTTP**: `POST http://localhost:5000/mcp` + +**Stdio Mode:** +- Communicates via standard input/output for MCP client integration + +### Example Tool Calls + +#### Send an SMS Message +```json +{ + "tool": "twilio_send_sms", + "arguments": { + "to": "+1234567890", + "from_": "+1987654321", + "body": "Hello from Twilio MCP Server!" + } +} +``` + +#### Make a Voice Call +```json +{ + "tool": "twilio_make_call", + "arguments": { + "to": "+1234567890", + "from_": "+1987654321", + "twiml": "Hello, this is a test call from Twilio!" + } +} +``` + +#### Search for Available Numbers +```json +{ + "tool": "twilio_search_available_numbers", + "arguments": { + "country_code": "US", + "area_code": "415", + "sms_enabled": true, + "voice_enabled": true, + "limit": 10 + } +} +``` + +#### Check Account Usage +```json +{ + "tool": "twilio_get_usage_records", + "arguments": { + "category": "sms", + "granularity": "daily", + "start_date": "2024-01-01", + "end_date": "2024-01-31" + } +} +``` + +## Error Handling & Troubleshooting + +The server provides detailed error information to help diagnose issues: + +### Common Error Scenarios + +1. **Authentication Errors**: + - Verify `TWILIO_ACCOUNT_SID` and `TWILIO_AUTH_TOKEN` are correct + - Check that credentials haven't expired or been rotated + +2. **Phone Number Format Errors**: + - Ensure numbers are in E.164 format (e.g., `+1234567890`) + - US numbers: 11 digits total including country code + +3. **Permission Errors**: + - Verify your Twilio account has sufficient permissions + - Check if services (SMS, Voice) are enabled in your region + +4. **Rate Limiting**: + - Twilio has rate limits on API calls and messaging + - Implement exponential backoff for production use + + +### Debugging Tips + +1. **Enable Debug Logging**: + ```bash + python server.py --log-level DEBUG + ``` + +2. **Test with Twilio Console**: + - Use Twilio's REST API Explorer to test credentials + - Send test messages through the Console first + +## Development & Testing + +### Local Development + +1. **Set up development environment**: + ```bash + python -m venv venv + source venv/bin/activate # On Windows: venv\Scripts\activate + pip install -r requirements.txt + ``` + +2. **Use Twilio Test Credentials**: + - Use Twilio's test credentials for development + - Test credentials don't send real messages or make real calls + +## Quick Start + +### Option 1: Local Installation +```bash +# 1. Navigate to the Twilio MCP server directory +cd mcp_servers/twilio + +# 2. Create virtual environment and install dependencies +python -m venv venv +source venv/bin/activate # On Windows: venv\Scripts\activate +pip install -r requirements.txt + +# 3. Set up your environment +cp .env.example .env +# Edit .env with your Twilio credentials + +# 4. Run the server +python server.py --stdio # For Claude Desktop +# OR +python server.py # For HTTP API testing +``` + +### Option 2: Docker Installation +```bash +# 1. Set up environment and build +cp .env.example .env +# Edit .env with your Twilio credentials +docker build -t twilio-mcp-server . + +# 2. Run the server +docker run -p 5000:5000 --env-file .env twilio-mcp-server +``` + +### Option 3: Claude Desktop Integration +1. Follow local installation steps 1-3 above +2. Add to your `claude_desktop_config.json`: +```json +{ + "mcpServers": { + "twilio": { + "command": "/absolute/path/to/your/venv/bin/python", + "args": ["/absolute/path/to/mcp_servers/twilio/server.py", "--stdio"], + "env": { + "TWILIO_ACCOUNT_SID": "your_account_sid_here", + "TWILIO_AUTH_TOKEN": "your_auth_token_here" + } + } + } +} +``` +3. Restart Claude Desktop +4. Ask Claude: "Can you check my Twilio account info?" + +## Contributing + +We welcome contributions! Please see the main [Contributing Guide](../../CONTRIBUTING.md) for details on: + +- Code style guidelines +- Pull request process +- Testing requirements +- Documentation standards + +### Twilio-Specific Contribution Guidelines + +- Test all tools with both valid and invalid inputs +- Ensure proper error handling for Twilio API errors +- Update documentation for any new Twilio features +- Include usage examples for new tools + +## License + +This project is licensed under the Apache 2.0 license - see the [LICENSE](../../LICENSE) file for details. + +## Support & Resources + +- **Twilio Documentation**: [https://www.twilio.com/docs](https://www.twilio.com/docs) +- **Twilio Console**: [https://console.twilio.com](https://console.twilio.com) +- **MCP Specification**: [https://modelcontextprotocol.io](https://modelcontextprotocol.io) +- **Issues**: [Report bugs or request features](https://github.com/klavis-ai/klavis/issues) + +--- \ No newline at end of file diff --git a/mcp_servers/twilio/requirements.txt b/mcp_servers/twilio/requirements.txt new file mode 100644 index 00000000..be96a788 --- /dev/null +++ b/mcp_servers/twilio/requirements.txt @@ -0,0 +1,7 @@ +mcp==1.11.0 +click>=8.0.0 +python-dotenv>=0.19.0 +starlette>=0.40.0 +twilio>=9.0.0 +pydantic>=2.0.0 +uvicorn>=0.30.0 \ No newline at end of file diff --git a/mcp_servers/twilio/server.py b/mcp_servers/twilio/server.py new file mode 100644 index 00000000..7440a4e6 --- /dev/null +++ b/mcp_servers/twilio/server.py @@ -0,0 +1,722 @@ +import contextlib +import logging +import os +import json +import asyncio +from collections.abc import AsyncIterator +from typing import Any, Dict + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.stdio import stdio_server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from dotenv import load_dotenv + +from tools import ( + auth_token_context, + # Messaging + twilio_send_sms, + twilio_send_mms, + twilio_get_messages, + twilio_get_message_by_sid, + # Voice + twilio_make_call, + twilio_get_calls, + twilio_get_call_by_sid, + twilio_get_recordings, + # Phone Numbers + twilio_search_available_numbers, + twilio_purchase_phone_number, + twilio_list_phone_numbers, + twilio_update_phone_number, + twilio_release_phone_number, + # Account + twilio_get_account_info, + twilio_get_usage_records, + twilio_get_balance, +) + +load_dotenv() + +# Configure logging +logger = logging.getLogger(__name__) + +TWILIO_MCP_SERVER_PORT = int(os.getenv("TWILIO_MCP_SERVER_PORT", "5000")) + +def get_all_tools() -> list[types.Tool]: + """Get all tool definitions to avoid duplication between stdio and HTTP modes.""" + return [ + # Messaging Tools + types.Tool( + name="twilio_send_sms", + description="Send an SMS message to a phone number using Twilio. Perfect for sending text notifications, alerts, or confirmations to users.", + inputSchema={ + "type": "object", + "properties": { + "to": { + "type": "string", + "description": "Recipient phone number in E.164 format (e.g., +1234567890)" + }, + "from_": { + "type": "string", + "description": "Sender phone number (must be a Twilio phone number you own)" + }, + "body": { + "type": "string", + "description": "Message content (up to 1600 characters)" + }, + "status_callback": { + "type": "string", + "description": "Optional webhook URL to receive delivery status updates" + } + }, + "required": ["to", "from_", "body"] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_MESSAGING"}), + ), + types.Tool( + name="twilio_send_mms", + description="Send an MMS message with media attachments (images, videos, PDFs) using Twilio. Use this when you need to send visual content along with or instead of text.", + inputSchema={ + "type": "object", + "properties": { + "to": { + "type": "string", + "description": "Recipient phone number in E.164 format" + }, + "from_": { + "type": "string", + "description": "Sender phone number (must be a Twilio phone number you own)" + }, + "body": { + "type": "string", + "description": "Optional message text to accompany the media" + }, + "media_url": { + "type": "array", + "items": {"type": "string"}, + "description": "List of media URLs to attach (max 10 attachments)" + }, + "status_callback": { + "type": "string", + "description": "Optional webhook URL to receive delivery status updates" + } + }, + "required": ["to", "from_"] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_MESSAGING"}), + ), + types.Tool( + name="twilio_get_messages", + description="Retrieve a list of SMS/MMS messages from your Twilio account. Use this to check message history, delivery status, or find specific conversations.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of messages to retrieve (default 20, max 1000)", + "default": 20 + }, + "date_sent_after": { + "type": "string", + "description": "ISO date string to filter messages sent after this date (e.g., '2024-01-01')" + }, + "date_sent_before": { + "type": "string", + "description": "ISO date string to filter messages sent before this date" + }, + "from_": { + "type": "string", + "description": "Filter by sender phone number" + }, + "to": { + "type": "string", + "description": "Filter by recipient phone number" + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_MESSAGING", "readOnlyHint": True}), + ), + types.Tool( + name="twilio_get_message_by_sid", + description="Retrieve detailed information about a specific message using its unique SID. Use this when you need complete details about a particular message including delivery status and error information.", + inputSchema={ + "type": "object", + "properties": { + "message_sid": { + "type": "string", + "description": "Unique identifier (SID) for the message" + } + }, + "required": ["message_sid"] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_MESSAGING", "readOnlyHint": True}), + ), + + # Voice Call Tools + types.Tool( + name="twilio_make_call", + description="Initiate a phone call using Twilio. You must provide either a TwiML URL or TwiML instructions to control what happens during the call (e.g., play message, collect input, record).", + inputSchema={ + "type": "object", + "properties": { + "to": { + "type": "string", + "description": "Phone number to call in E.164 format" + }, + "from_": { + "type": "string", + "description": "Caller phone number (must be a Twilio phone number you own)" + }, + "url": { + "type": "string", + "description": "URL that returns TwiML instructions for the call" + }, + "twiml": { + "type": "string", + "description": "TwiML instructions as a string (alternative to url)" + }, + "method": { + "type": "string", + "description": "HTTP method for the webhook (GET or POST, default POST)" + }, + "status_callback": { + "type": "string", + "description": "URL to receive call status updates" + }, + "timeout": { + "type": "integer", + "description": "Seconds to wait for an answer (default 60)" + }, + "record": { + "type": "boolean", + "description": "Whether to record the call (default false)" + } + }, + "required": ["to", "from_"] + } + ), + types.Tool( + name="twilio_get_calls", + description="Retrieve a list of calls from your Twilio account. Use this to check call history, find calls with specific status, or analyze call patterns.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of calls to retrieve (default 20, max 1000)" + }, + "status": { + "type": "string", + "description": "Filter by call status", + "enum": ["queued", "ringing", "in-progress", "completed", "busy", "failed", "no-answer", "canceled"] + }, + "from_": { + "type": "string", + "description": "Filter by caller phone number" + }, + "to": { + "type": "string", + "description": "Filter by called phone number" + }, + "start_time_after": { + "type": "string", + "description": "ISO date string to filter calls started after this time" + }, + "start_time_before": { + "type": "string", + "description": "ISO date string to filter calls started before this time" + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_VOICE", "readOnlyHint": True}), + ), + types.Tool( + name="twilio_get_call_by_sid", + description="Retrieve detailed information about a specific call using its unique SID. Use this to get complete call details including duration, status, and billing information.", + inputSchema={ + "type": "object", + "properties": { + "call_sid": { + "type": "string", + "description": "Unique identifier (SID) for the call" + } + }, + "required": ["call_sid"] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_VOICE", "readOnlyHint": True}), + ), + types.Tool( + name="twilio_get_recordings", + description="Retrieve call recordings from your Twilio account. Use this to access recorded conversations for quality assurance, compliance, or analysis.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of recordings to retrieve (default 20, max 1000)" + }, + "call_sid": { + "type": "string", + "description": "Filter recordings by specific call SID" + }, + "date_created_after": { + "type": "string", + "description": "ISO date string to filter recordings created after this date" + }, + "date_created_before": { + "type": "string", + "description": "ISO date string to filter recordings created before this date" + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_VOICE", "readOnlyHint": True}), + ), + + # Phone Number Management Tools + types.Tool( + name="twilio_search_available_numbers", + description="Search for available phone numbers to purchase from Twilio. Use this to find numbers with specific area codes, capabilities (SMS/voice), or number patterns.", + inputSchema={ + "type": "object", + "properties": { + "country_code": { + "type": "string", + "description": "Two-letter country code (default 'US')", + "default": "US" + }, + "area_code": { + "type": "string", + "description": "Specific area code to search within (e.g., '415' for San Francisco)" + }, + "contains": { + "type": "string", + "description": "Search for numbers containing specific digits" + }, + "sms_enabled": { + "type": "boolean", + "description": "Filter for SMS-capable numbers (default true)" + }, + "voice_enabled": { + "type": "boolean", + "description": "Filter for voice-capable numbers (default true)" + }, + "limit": { + "type": "integer", + "description": "Maximum number of results (default 20, max 50)" + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_PHONE", "readOnlyHint": True}), + ), + types.Tool( + name="twilio_purchase_phone_number", + description="Purchase an available phone number from Twilio. Use this after finding a suitable number with search_available_numbers. You can configure webhooks for incoming calls and messages.", + inputSchema={ + "type": "object", + "properties": { + "phone_number": { + "type": "string", + "description": "Phone number to purchase in E.164 format" + }, + "friendly_name": { + "type": "string", + "description": "A human-readable name for the number" + }, + "voice_url": { + "type": "string", + "description": "URL to handle incoming voice calls" + }, + "sms_url": { + "type": "string", + "description": "URL to handle incoming SMS messages" + }, + "status_callback": { + "type": "string", + "description": "URL to receive status updates" + } + }, + "required": ["phone_number"] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_PHONE"}), + ), + types.Tool( + name="twilio_list_phone_numbers", + description="List all phone numbers currently owned by your Twilio account. Use this to see your phone number inventory and their current configurations.", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of numbers to retrieve (default 20, max 1000)" + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_PHONE", "readOnlyHint": True}), + ), + types.Tool( + name="twilio_update_phone_number", + description="Update the configuration of an existing phone number. Use this to change webhook URLs, friendly names, or other settings for numbers you own.", + inputSchema={ + "type": "object", + "properties": { + "phone_number_sid": { + "type": "string", + "description": "SID of the phone number to update" + }, + "friendly_name": { + "type": "string", + "description": "New friendly name for the number" + }, + "voice_url": { + "type": "string", + "description": "New URL to handle incoming voice calls" + }, + "sms_url": { + "type": "string", + "description": "New URL to handle incoming SMS messages" + }, + "status_callback": { + "type": "string", + "description": "New URL to receive status updates" + } + }, + "required": ["phone_number_sid"] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_PHONE"}), + ), + types.Tool( + name="twilio_release_phone_number", + description="Release (delete) a phone number from your Twilio account. This permanently removes the number and stops all billing for it. Use with caution as this action cannot be undone.", + inputSchema={ + "type": "object", + "properties": { + "phone_number_sid": { + "type": "string", + "description": "SID of the phone number to release" + } + }, + "required": ["phone_number_sid"] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_PHONE"}), + ), + + # Account & Usage Tools + types.Tool( + name="twilio_get_account_info", + description="Retrieve your Twilio account information including status, type, and creation date. Use this to verify account details and current status.", + inputSchema={ + "type": "object", + "properties": {}, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_ACCOUNT", "readOnlyHint": True}), + ), + types.Tool( + name="twilio_get_balance", + description="Get the current balance of your Twilio account. Use this to check available credit and monitor spending.", + inputSchema={ + "type": "object", + "properties": {}, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_ACCOUNT", "readOnlyHint": True}), + ), + types.Tool( + name="twilio_get_usage_records", + description="Retrieve usage records for your Twilio account to analyze spending patterns, track usage by category, and generate usage reports. Perfect for billing analysis and cost monitoring.", + inputSchema={ + "type": "object", + "properties": { + "category": { + "type": "string", + "description": "Usage category to filter by (e.g., 'sms', 'calls', 'recordings')" + }, + "start_date": { + "type": "string", + "description": "Start date for usage period in YYYY-MM-DD format" + }, + "end_date": { + "type": "string", + "description": "End date for usage period in YYYY-MM-DD format" + }, + "granularity": { + "type": "string", + "description": "Time granularity for the report", + "enum": ["daily", "monthly", "yearly", "all-time"], + "default": "daily" + }, + "limit": { + "type": "integer", + "description": "Maximum number of records to retrieve (default 50, max 1000)" + } + }, + "required": [] + }, + annotations=types.ToolAnnotations(**{"category": "TWILIO_USAGE", "readOnlyHint": True}), + ), + ] + +async def call_tool_router(name: str, arguments: dict) -> dict: + """Unified tool router to avoid duplication between stdio and HTTP modes.""" + # Set auth token context from environment + auth_token = os.getenv("TWILIO_AUTH_TOKEN") + if auth_token: + auth_token_context.set(auth_token) + + logger.info(f"Calling tool: {name} with arguments: {arguments}") + + # Route to appropriate tool function + if name == "twilio_send_sms": + return await twilio_send_sms(**arguments) + elif name == "twilio_send_mms": + return await twilio_send_mms(**arguments) + elif name == "twilio_get_messages": + return await twilio_get_messages(**arguments) + elif name == "twilio_get_message_by_sid": + return await twilio_get_message_by_sid(**arguments) + elif name == "twilio_make_call": + return await twilio_make_call(**arguments) + elif name == "twilio_get_calls": + return await twilio_get_calls(**arguments) + elif name == "twilio_get_call_by_sid": + return await twilio_get_call_by_sid(**arguments) + elif name == "twilio_get_recordings": + return await twilio_get_recordings(**arguments) + elif name == "twilio_search_available_numbers": + return await twilio_search_available_numbers(**arguments) + elif name == "twilio_purchase_phone_number": + return await twilio_purchase_phone_number(**arguments) + elif name == "twilio_list_phone_numbers": + return await twilio_list_phone_numbers(**arguments) + elif name == "twilio_update_phone_number": + return await twilio_update_phone_number(**arguments) + elif name == "twilio_release_phone_number": + return await twilio_release_phone_number(**arguments) + elif name == "twilio_get_account_info": + return await twilio_get_account_info(**arguments) + elif name == "twilio_get_balance": + return await twilio_get_balance(**arguments) + elif name == "twilio_get_usage_records": + return await twilio_get_usage_records(**arguments) + else: + raise ValueError(f"Unknown tool: {name}") + +@click.command() +@click.option("--port", default=TWILIO_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +@click.option( + "--stdio", + is_flag=True, + default=False, + help="Run in stdio mode for Claude Desktop (instead of HTTP server mode)", +) +def main( + port: int, + log_level: str, + json_response: bool, + stdio: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + if stdio: + # Run stdio mode for Claude Desktop + return run_stdio_mode() + else: + # Run HTTP mode (default) + return run_http_mode(port, json_response) + + +def run_stdio_mode() -> int: + """Run the MCP server in stdio mode for Claude Desktop integration.""" + import sys + + def debug_log(message: str): + print(f"[TWILIO-DEBUG] {message}", file=sys.stderr) + sys.stderr.flush() + + debug_log("Twilio MCP Server initializing in stdio mode...") + + # Create the MCP server instance + app = Server("twilio-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + debug_log("Listing tools requested") + # Return all tools using shared function + return get_all_tools() + + @app.call_tool() + async def call_tool(name: str, arguments: dict) -> list[types.TextContent]: + debug_log(f"Tool call requested: {name}") + try: + # Use shared tool router with auth context handling + result = await call_tool_router(name, arguments) + debug_log(f"Tool {name} completed successfully") + return [types.TextContent(type="text", text=json.dumps(result, indent=2))] + + except Exception as e: + debug_log(f"Tool {name} failed: {str(e)}") + logger.error(f"Error calling tool {name}: {e}") + error_response = { + "error": str(e), + "tool": name, + "arguments": arguments + } + return [types.TextContent(type="text", text=json.dumps(error_response, indent=2))] + + async def run_server(): + debug_log("Starting stdio server...") + async with stdio_server() as (read_stream, write_stream): + await app.run(read_stream, write_stream, app.create_initialization_options()) + + asyncio.run(run_server()) + return 0 + + +def run_http_mode(port: int, json_response: bool) -> int: + """Run the MCP server in HTTP mode with dual transports.""" + + # Create the MCP server instance + app = Server("twilio-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + # Return all tools using shared function + return get_all_tools() + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + try: + # Use shared tool router with auth context handling + result = await call_tool_router(name, arguments) + return [ + types.TextContent( + type="text", + text=json.dumps(result, indent=2), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + + # Extract auth token from headers or use environment + auth_token = request.headers.get('x-auth-token') or os.getenv("TWILIO_AUTH_TOKEN") + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token or "") + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + + # Extract auth token from headers or use environment + headers = dict(scope.get("headers", [])) + auth_token = headers.get(b'x-auth-token') + if auth_token: + auth_token = auth_token.decode('utf-8') + else: + auth_token = os.getenv("TWILIO_AUTH_TOKEN") + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token or "") + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/mcp_servers/twilio/tools/__init__.py b/mcp_servers/twilio/tools/__init__.py new file mode 100644 index 00000000..0cbbe40a --- /dev/null +++ b/mcp_servers/twilio/tools/__init__.py @@ -0,0 +1,50 @@ +from .base import auth_token_context, get_twilio_client +from .messaging import ( + twilio_send_sms, + twilio_send_mms, + twilio_get_messages, + twilio_get_message_by_sid, +) +from .voice import ( + twilio_make_call, + twilio_get_calls, + twilio_get_call_by_sid, + twilio_get_recordings, +) +from .phone_numbers import ( + twilio_search_available_numbers, + twilio_purchase_phone_number, + twilio_list_phone_numbers, + twilio_update_phone_number, + twilio_release_phone_number, +) +from .account import ( + twilio_get_account_info, + twilio_get_usage_records, + twilio_get_balance, +) + +__all__ = [ + "auth_token_context", + "get_twilio_client", + # Messaging tools + "twilio_send_sms", + "twilio_send_mms", + "twilio_get_messages", + "twilio_get_message_by_sid", + # Voice tools + "twilio_make_call", + "twilio_get_calls", + "twilio_get_call_by_sid", + "twilio_get_recordings", + # Phone number tools + "twilio_search_available_numbers", + "twilio_purchase_phone_number", + "twilio_list_phone_numbers", + "twilio_update_phone_number", + "twilio_release_phone_number", + # Account tools + "twilio_get_account_info", + "twilio_get_usage_records", + "twilio_get_balance", +] \ No newline at end of file diff --git a/mcp_servers/twilio/tools/account.py b/mcp_servers/twilio/tools/account.py new file mode 100644 index 00000000..ec791576 --- /dev/null +++ b/mcp_servers/twilio/tools/account.py @@ -0,0 +1,203 @@ +import logging +from typing import Optional, Dict, Any +from .base import get_twilio_client + +# Configure logging +logger = logging.getLogger(__name__) + +async def twilio_get_account_info() -> dict: + """ + Retrieve Twilio account information and status. + + Returns: + - Dictionary containing account details, status, and settings + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + logger.info("Retrieving Twilio account information") + + account = client.api.account.fetch() + + return { + 'sid': account.sid, + 'friendly_name': account.friendly_name, + 'status': account.status, + 'type': account.type, + 'auth_token': account.auth_token, + 'owner_account_sid': account.owner_account_sid, + 'date_created': account.date_created.isoformat() if account.date_created else None, + 'date_updated': account.date_updated.isoformat() if account.date_updated else None, + } + except Exception as e: + logger.error(f"Error retrieving account information: {e}") + raise e + +async def twilio_get_balance() -> dict: + """ + Retrieve current account balance. + + Returns: + - Dictionary containing account balance information + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + logger.info("Retrieving account balance") + + balance = client.balance.fetch() + + return { + 'account_sid': balance.account_sid, + 'balance': balance.balance, + 'currency': balance.currency, + } + except Exception as e: + logger.error(f"Error retrieving account balance: {e}") + raise e + +async def twilio_get_usage_records( + category: Optional[str] = None, + start_date: Optional[str] = None, + end_date: Optional[str] = None, + granularity: str = "daily", + limit: int = 50 +) -> dict: + """ + Retrieve usage records for the account. + + Parameters: + - category: Usage category (sms, calls, recordings, etc.) + - start_date: Start date for usage period (YYYY-MM-DD format) + - end_date: End date for usage period (YYYY-MM-DD format) + - granularity: Time granularity (daily, monthly, yearly, all-time) + - limit: Maximum number of records to retrieve (default 50, max 1000) + + Returns: + - Dictionary with usage records and totals + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + if limit > 1000: + limit = 1000 + + logger.info(f"Retrieving usage records with granularity: {granularity}") + + filter_params = {'limit': limit} + + if category: + filter_params['category'] = category + if start_date: + filter_params['start_date'] = start_date + if end_date: + filter_params['end_date'] = end_date + + # Map granularity to the appropriate usage endpoint + if granularity.lower() == "daily": + usage_records = client.usage.records.daily.list(**filter_params) + elif granularity.lower() == "monthly": + usage_records = client.usage.records.monthly.list(**filter_params) + elif granularity.lower() == "yearly": + usage_records = client.usage.records.yearly.list(**filter_params) + elif granularity.lower() == "all-time": + usage_records = client.usage.records.list(**filter_params) + else: + raise ValueError("Granularity must be one of: daily, monthly, yearly, all-time") + + records_list = [] + total_usage = 0 + total_price = 0 + + for record in usage_records: + record_data = { + 'category': record.category, + 'description': record.description, + 'count': record.count, + 'count_unit': record.count_unit, + 'usage': record.usage, + 'usage_unit': record.usage_unit, + 'price': float(record.price) if record.price else 0, + 'price_unit': record.price_unit, + 'start_date': record.start_date.strftime('%Y-%m-%d') if record.start_date else None, + 'end_date': record.end_date.strftime('%Y-%m-%d') if record.end_date else None, + } + records_list.append(record_data) + + # Accumulate totals + if record.usage: + total_usage += int(record.usage) + if record.price: + total_price += float(record.price) + + logger.info(f"Retrieved {len(records_list)} usage records") + + return { + 'usage_records': records_list, + 'count': len(records_list), + 'summary': { + 'total_usage': total_usage, + 'total_price': round(total_price, 4), + 'currency': records_list[0]['price_unit'] if records_list else 'USD', + 'granularity': granularity, + }, + 'filters_applied': {k: v for k, v in filter_params.items() if k != 'limit'} + } + except Exception as e: + logger.error(f"Error retrieving usage records: {e}") + raise e + +async def twilio_get_usage_triggers(limit: int = 50) -> dict: + """ + Retrieve usage triggers configured for the account. + + Parameters: + - limit: Maximum number of triggers to retrieve (default 50, max 1000) + + Returns: + - Dictionary with list of configured usage triggers + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + if limit > 1000: + limit = 1000 + + logger.info(f"Retrieving up to {limit} usage triggers") + + triggers = client.usage.triggers.list(limit=limit) + + trigger_list = [] + for trigger in triggers: + trigger_list.append({ + 'sid': trigger.sid, + 'friendly_name': trigger.friendly_name, + 'usage_category': trigger.usage_category, + 'trigger_value': trigger.trigger_value, + 'current_value': trigger.current_value, + 'trigger_by': trigger.trigger_by, + 'recurring': trigger.recurring, + 'callback_url': trigger.callback_url, + 'callback_method': trigger.callback_method, + 'date_created': trigger.date_created.isoformat() if trigger.date_created else None, + 'date_fired': trigger.date_fired.isoformat() if trigger.date_fired else None, + 'date_updated': trigger.date_updated.isoformat() if trigger.date_updated else None, + }) + + logger.info(f"Retrieved {len(trigger_list)} usage triggers") + + return { + 'usage_triggers': trigger_list, + 'count': len(trigger_list) + } + except Exception as e: + logger.error(f"Error retrieving usage triggers: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/twilio/tools/base.py b/mcp_servers/twilio/tools/base.py new file mode 100644 index 00000000..0eeb3c98 --- /dev/null +++ b/mcp_servers/twilio/tools/base.py @@ -0,0 +1,69 @@ +import logging +import os +from contextvars import ContextVar +from twilio.rest import Client +from typing import Optional +from dotenv import load_dotenv + +# Configure logging +logger = logging.getLogger(__name__) + +load_dotenv() + +# Context variable to store the auth token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + token = auth_token_context.get() + if not token: + # Fallback to environment variable if no token in context + token = os.getenv("TWILIO_AUTH_TOKEN") + if not token: + raise RuntimeError("No authentication token available") + return token + except LookupError: + token = os.getenv("TWILIO_AUTH_TOKEN") + if not token: + raise RuntimeError("Authentication token not found in request context or environment") + return token + +def get_account_sid() -> str: + """Get the Twilio Account SID from environment.""" + account_sid = os.getenv("TWILIO_ACCOUNT_SID") + if not account_sid: + raise RuntimeError("TWILIO_ACCOUNT_SID not found in environment variables") + return account_sid + +def get_twilio_client() -> Optional[Client]: + """Create and return a Twilio client instance.""" + try: + account_sid = get_account_sid() + auth_token = get_auth_token() + + logger.debug(f"Creating Twilio client with Account SID: {account_sid}") + client = Client(account_sid, auth_token) + return client + except Exception as e: + logger.error(f"Failed to create Twilio client: {e}") + raise e + +def validate_phone_number(phone_number: str) -> str: + """Validate and format phone number to E.164 format.""" + if not phone_number: + raise ValueError("Phone number cannot be empty") + + # Remove spaces, dashes, parentheses + cleaned = ''.join(c for c in phone_number if c.isdigit() or c == '+') + + # Add + prefix if not present and number starts with country code + if not cleaned.startswith('+'): + if cleaned.startswith('1') and len(cleaned) == 11: + cleaned = '+' + cleaned + elif len(cleaned) == 10: + cleaned = '+1' + cleaned + else: + raise ValueError(f"Invalid phone number format: {phone_number}") + + return cleaned \ No newline at end of file diff --git a/mcp_servers/twilio/tools/messaging.py b/mcp_servers/twilio/tools/messaging.py new file mode 100644 index 00000000..6c245b6c --- /dev/null +++ b/mcp_servers/twilio/tools/messaging.py @@ -0,0 +1,247 @@ +import logging +from typing import Optional, List +from .base import get_twilio_client, validate_phone_number + +# Configure logging +logger = logging.getLogger(__name__) + +async def twilio_send_sms( + to: str, + from_: str, + body: str, + status_callback: Optional[str] = None +) -> dict: + """ + Send an SMS message using Twilio. + + Parameters: + - to: Recipient phone number in E.164 format (e.g., +1234567890) + - from_: Sender phone number (must be a Twilio phone number) + - body: Message content (up to 1600 characters) + - status_callback: Optional webhook URL for delivery status updates + + Returns: + - Dictionary containing message SID, status, and other details + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + # Validate phone numbers + to_number = validate_phone_number(to) + from_number = validate_phone_number(from_) + + if len(body) > 1600: + raise ValueError("SMS body cannot exceed 1600 characters") + + logger.info(f"Sending SMS from {from_number} to {to_number}") + + message_params = { + 'body': body, + 'from_': from_number, + 'to': to_number + } + + if status_callback: + message_params['status_callback'] = status_callback + + message = client.messages.create(**message_params) + + logger.info(f"SMS sent successfully. SID: {message.sid}") + + return { + 'sid': message.sid, + 'status': message.status, + 'direction': message.direction, # Handle reserved keyword + 'to': message.to, + 'body': message.body, + 'num_segments': message.num_segments, + 'price': message.price, + 'price_unit': message.price_unit, + 'date_created': message.date_created.isoformat() if message.date_created else None, + 'date_sent': message.date_sent.isoformat() if message.date_sent else None, + } + except Exception as e: + logger.error(f"Error sending SMS: {e}") + raise e + +async def twilio_send_mms( + to: str, + from_: str, + body: Optional[str] = None, + media_url: Optional[List[str]] = None, + status_callback: Optional[str] = None +) -> dict: + """ + Send an MMS message with media attachments using Twilio. + + Parameters: + - to: Recipient phone number in E.164 format + - from_: Sender phone number (must be a Twilio phone number) + - body: Optional message text + - media_url: List of media URLs to attach (images, videos, audio, PDFs) + - status_callback: Optional webhook URL for delivery status updates + + Returns: + - Dictionary containing message SID, status, and other details + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + # Validate phone numbers + to_number = validate_phone_number(to) + from_number = validate_phone_number(from_) + + if not body and not media_url: + raise ValueError("MMS must contain either body text or media URL") + + if media_url and len(media_url) > 10: + raise ValueError("Maximum 10 media attachments allowed per MMS") + + logger.info(f"Sending MMS from {from_number} to {to_number}") + + message_params = { + 'from_': from_number, + 'to': to_number + } + + if body: + message_params['body'] = body + + if media_url: + message_params['media_url'] = media_url + + if status_callback: + message_params['status_callback'] = status_callback + + message = client.messages.create(**message_params) + + logger.info(f"MMS sent successfully. SID: {message.sid}") + + return { + 'sid': message.sid, + 'status': message.status, + 'direction': message.direction, # Handle reserved keyword + 'to': message.to, + 'body': message.body, + 'num_media': message.num_media, + 'num_segments': message.num_segments, + 'price': message.price, + 'price_unit': message.price_unit, + 'date_created': message.date_created.isoformat() if message.date_created else None, + 'date_sent': message.date_sent.isoformat() if message.date_sent else None, + } + except Exception as e: + logger.error(f"Error sending MMS: {e}") + raise e + +async def twilio_get_messages( + limit: int = 20, + date_sent_after: Optional[str] = None, + date_sent_before: Optional[str] = None, + from_: Optional[str] = None, + to: Optional[str] = None +) -> dict: + """ + Retrieve a list of messages from Twilio account. + + Parameters: + - limit: Maximum number of messages to retrieve (default 20, max 1000) + - date_sent_after: ISO date string to filter messages sent after this date + - date_sent_before: ISO date string to filter messages sent before this date + - from_: Filter by sender phone number + - to: Filter by recipient phone number + + Returns: + - Dictionary with list of messages and metadata + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + if limit > 1000: + limit = 1000 + + logger.info(f"Retrieving up to {limit} messages") + + filter_params = {} + if date_sent_after: + filter_params['date_sent_after'] = date_sent_after + if date_sent_before: + filter_params['date_sent_before'] = date_sent_before + if from_: + filter_params['from_'] = validate_phone_number(from_) + if to: + filter_params['to'] = validate_phone_number(to) + + messages = client.messages.list(limit=limit, **filter_params) + + message_list = [] + for message in messages: + message_list.append({ + 'sid': message.sid, + 'status': message.status, + 'direction': message.direction, # Handle reserved keyword + 'to': message.to, + 'body': message.body, + 'num_segments': message.num_segments, + 'num_media': message.num_media, + 'price': message.price, + 'price_unit': message.price_unit, + 'date_created': message.date_created.isoformat() if message.date_created else None, + 'date_sent': message.date_sent.isoformat() if message.date_sent else None, + }) + + logger.info(f"Retrieved {len(message_list)} messages") + + return { + 'messages': message_list, + 'count': len(message_list), + 'filters_applied': filter_params + } + except Exception as e: + logger.error(f"Error retrieving messages: {e}") + raise e + +async def twilio_get_message_by_sid(message_sid: str) -> dict: + """ + Retrieve a specific message by its SID. + + Parameters: + - message_sid: Unique identifier for the message + + Returns: + - Dictionary containing message details + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + logger.info(f"Retrieving message with SID: {message_sid}") + + message = client.messages(message_sid).fetch() + + return { + 'sid': message.sid, + 'status': message.status, + 'direction': message.direction, # Handle reserved keyword + 'to': message.to, + 'body': message.body, + 'num_segments': message.num_segments, + 'num_media': message.num_media, + 'price': message.price, + 'price_unit': message.price_unit, + 'error_code': message.error_code, + 'error_message': message.error_message, + 'date_created': message.date_created.isoformat() if message.date_created else None, + 'date_sent': message.date_sent.isoformat() if message.date_sent else None, + 'date_updated': message.date_updated.isoformat() if message.date_updated else None, + } + except Exception as e: + logger.error(f"Error retrieving message {message_sid}: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/twilio/tools/phone_numbers.py b/mcp_servers/twilio/tools/phone_numbers.py new file mode 100644 index 00000000..ccc40099 --- /dev/null +++ b/mcp_servers/twilio/tools/phone_numbers.py @@ -0,0 +1,298 @@ +import logging +from typing import Optional, List, Dict, Any +from .base import get_twilio_client, validate_phone_number + +# Configure logging +logger = logging.getLogger(__name__) + +async def twilio_search_available_numbers( + country_code: str = "US", + area_code: Optional[str] = None, + contains: Optional[str] = None, + sms_enabled: bool = True, + voice_enabled: bool = True, + limit: int = 20 +) -> dict: + """ + Search for available phone numbers to purchase. + + Parameters: + - country_code: Two-letter country code (default "US") + - area_code: Specific area code to search within + - contains: Search for numbers containing specific digits + - sms_enabled: Filter for SMS-capable numbers (default True) + - voice_enabled: Filter for voice-capable numbers (default True) + - limit: Maximum number of results (default 20, max 50) + + Returns: + - Dictionary with list of available phone numbers and their capabilities + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + if limit > 50: + limit = 50 + + logger.info(f"Searching for available numbers in {country_code}") + + search_params = { + 'limit': limit, + 'sms_enabled': sms_enabled, + 'voice_enabled': voice_enabled + } + + if area_code: + search_params['area_code'] = area_code + if contains: + search_params['contains'] = contains + + if country_code.upper() == "US": + numbers = client.available_phone_numbers("US").local.list(**search_params) + else: + numbers = client.available_phone_numbers(country_code.upper()).local.list(**search_params) + + number_list = [] + for number in numbers: + number_list.append({ + 'phone_number': number.phone_number, + 'friendly_name': number.friendly_name, + 'iso_country': number.iso_country, + 'locality': number.locality, + 'region': number.region, + 'postal_code': number.postal_code, + 'capabilities': { + 'voice': number.capabilities.get('voice', False), + 'sms': number.capabilities.get('SMS', False), + 'mms': number.capabilities.get('MMS', False), + 'fax': number.capabilities.get('fax', False) + } + }) + + logger.info(f"Found {len(number_list)} available numbers") + + return { + 'available_numbers': number_list, + 'count': len(number_list), + 'search_criteria': search_params, + 'country_code': country_code.upper() + } + except Exception as e: + logger.error(f"Error searching available numbers: {e}") + raise e + +async def twilio_purchase_phone_number( + phone_number: str, + friendly_name: Optional[str] = None, + voice_url: Optional[str] = None, + sms_url: Optional[str] = None, + status_callback: Optional[str] = None +) -> dict: + """ + Purchase a phone number from Twilio. + + Parameters: + - phone_number: Phone number to purchase in E.164 format + - friendly_name: A human-readable name for the number + - voice_url: URL to handle incoming voice calls + - sms_url: URL to handle incoming SMS messages + - status_callback: URL to receive status updates + + Returns: + - Dictionary containing purchased number details + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + phone_number = validate_phone_number(phone_number) + + logger.info(f"Purchasing phone number: {phone_number}") + + purchase_params = { + 'phone_number': phone_number + } + + if friendly_name: + purchase_params['friendly_name'] = friendly_name + if voice_url: + purchase_params['voice_url'] = voice_url + if sms_url: + purchase_params['sms_url'] = sms_url + if status_callback: + purchase_params['status_callback'] = status_callback + + incoming_number = client.incoming_phone_numbers.create(**purchase_params) + + logger.info(f"Phone number purchased successfully. SID: {incoming_number.sid}") + + return { + 'sid': incoming_number.sid, + 'phone_number': incoming_number.phone_number, + 'friendly_name': incoming_number.friendly_name, + 'voice_url': incoming_number.voice_url, + 'sms_url': incoming_number.sms_url, + 'status_callback': incoming_number.status_callback, + 'capabilities': { + 'voice': incoming_number.capabilities.get('voice', False), + 'sms': incoming_number.capabilities.get('SMS', False), + 'mms': incoming_number.capabilities.get('MMS', False), + 'fax': incoming_number.capabilities.get('fax', False) + }, + 'date_created': incoming_number.date_created.isoformat() if incoming_number.date_created else None, + } + except Exception as e: + logger.error(f"Error purchasing phone number {phone_number}: {e}") + raise e + +async def twilio_list_phone_numbers(limit: int = 20) -> dict: + """ + List all phone numbers owned by the Twilio account. + + Parameters: + - limit: Maximum number of numbers to retrieve (default 20, max 1000) + + Returns: + - Dictionary with list of owned phone numbers + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + if limit > 1000: + limit = 1000 + + logger.info(f"Retrieving up to {limit} phone numbers") + + numbers = client.incoming_phone_numbers.list(limit=limit) + + number_list = [] + for number in numbers: + number_list.append({ + 'sid': number.sid, + 'phone_number': number.phone_number, + 'friendly_name': number.friendly_name, + 'voice_url': number.voice_url, + 'sms_url': number.sms_url, + 'status_callback': number.status_callback, + 'capabilities': { + 'voice': number.capabilities.get('voice', False), + 'sms': number.capabilities.get('SMS', False), + 'mms': number.capabilities.get('MMS', False), + 'fax': number.capabilities.get('fax', False) + }, + 'date_created': number.date_created.isoformat() if number.date_created else None, + 'date_updated': number.date_updated.isoformat() if number.date_updated else None, + }) + + logger.info(f"Retrieved {len(number_list)} phone numbers") + + return { + 'phone_numbers': number_list, + 'count': len(number_list) + } + except Exception as e: + logger.error(f"Error listing phone numbers: {e}") + raise e + +async def twilio_update_phone_number( + phone_number_sid: str, + friendly_name: Optional[str] = None, + voice_url: Optional[str] = None, + sms_url: Optional[str] = None, + status_callback: Optional[str] = None +) -> dict: + """ + Update configuration of an existing phone number. + + Parameters: + - phone_number_sid: SID of the phone number to update + - friendly_name: New friendly name for the number + - voice_url: New URL to handle incoming voice calls + - sms_url: New URL to handle incoming SMS messages + - status_callback: New URL to receive status updates + + Returns: + - Dictionary containing updated phone number details + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + logger.info(f"Updating phone number with SID: {phone_number_sid}") + + update_params = {} + if friendly_name is not None: + update_params['friendly_name'] = friendly_name + if voice_url is not None: + update_params['voice_url'] = voice_url + if sms_url is not None: + update_params['sms_url'] = sms_url + if status_callback is not None: + update_params['status_callback'] = status_callback + + if not update_params: + raise ValueError("At least one parameter must be provided to update") + + incoming_number = client.incoming_phone_numbers(phone_number_sid).update(**update_params) + + logger.info(f"Phone number updated successfully") + + return { + 'sid': incoming_number.sid, + 'phone_number': incoming_number.phone_number, + 'friendly_name': incoming_number.friendly_name, + 'voice_url': incoming_number.voice_url, + 'sms_url': incoming_number.sms_url, + 'status_callback': incoming_number.status_callback, + 'capabilities': { + 'voice': incoming_number.capabilities.get('voice', False), + 'sms': incoming_number.capabilities.get('SMS', False), + 'mms': incoming_number.capabilities.get('MMS', False), + 'fax': incoming_number.capabilities.get('fax', False) + }, + 'date_updated': incoming_number.date_updated.isoformat() if incoming_number.date_updated else None, + } + except Exception as e: + logger.error(f"Error updating phone number {phone_number_sid}: {e}") + raise e + +async def twilio_release_phone_number(phone_number_sid: str) -> dict: + """ + Release (delete) a phone number from the Twilio account. + + Parameters: + - phone_number_sid: SID of the phone number to release + + Returns: + - Dictionary confirming the release operation + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + logger.info(f"Releasing phone number with SID: {phone_number_sid}") + + # Fetch number details before deletion for confirmation + number = client.incoming_phone_numbers(phone_number_sid).fetch() + phone_number = number.phone_number + + # Delete the number + client.incoming_phone_numbers(phone_number_sid).delete() + + logger.info(f"Phone number {phone_number} released successfully") + + return { + 'success': True, + 'message': f"Phone number {phone_number} has been released", + 'released_number': phone_number, + 'sid': phone_number_sid + } + except Exception as e: + logger.error(f"Error releasing phone number {phone_number_sid}: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/twilio/tools/voice.py b/mcp_servers/twilio/tools/voice.py new file mode 100644 index 00000000..857e5c43 --- /dev/null +++ b/mcp_servers/twilio/tools/voice.py @@ -0,0 +1,269 @@ +import logging +from typing import Optional, Dict, Any +from .base import get_twilio_client, validate_phone_number + +# Configure logging +logger = logging.getLogger(__name__) + +async def twilio_make_call( + to: str, + from_: str, + url: Optional[str] = None, + twiml: Optional[str] = None, + method: str = "POST", + status_callback: Optional[str] = None, + timeout: int = 60, + record: bool = False +) -> dict: + """ + Make a phone call using Twilio. + + Parameters: + - to: Phone number to call in E.164 format + - from_: Caller phone number (must be a Twilio phone number) + - url: URL that returns TwiML instructions for the call + - twiml: TwiML instructions as a string (alternative to url) + - method: HTTP method for the webhook (GET or POST) + - status_callback: URL to receive call status updates + - timeout: Number of seconds to wait for an answer (default 60) + - record: Whether to record the call + + Returns: + - Dictionary containing call SID, status, and other details + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + # Validate phone numbers + to_number = validate_phone_number(to) + from_number = validate_phone_number(from_) + + if not url and not twiml: + raise ValueError("Either 'url' or 'twiml' parameter must be provided") + + if url and twiml: + raise ValueError("Cannot specify both 'url' and 'twiml' parameters") + + logger.info(f"Making call from {from_number} to {to_number}") + + call_params = { + 'from_': from_number, + 'to': to_number, + 'method': method.upper(), + 'timeout': timeout + } + + if url: + call_params['url'] = url + elif twiml: + call_params['twiml'] = twiml + + if status_callback: + call_params['status_callback'] = status_callback + + if record: + call_params['record'] = record + + call = client.calls.create(**call_params) + + logger.info(f"Call initiated successfully. SID: {call.sid}") + + return { + 'sid': call.sid, + 'status': call.status, + 'direction': call.direction, + 'to': call.to, + 'duration': call.duration, + 'price': call.price, + 'price_unit': call.price_unit, + 'date_created': call.date_created.isoformat() if call.date_created else None, + 'date_updated': call.date_updated.isoformat() if call.date_updated else None, + } + except Exception as e: + logger.error(f"Error making call: {e}") + raise e + +async def twilio_get_calls( + limit: int = 20, + status: Optional[str] = None, + from_: Optional[str] = None, + to: Optional[str] = None, + start_time_after: Optional[str] = None, + start_time_before: Optional[str] = None +) -> dict: + """ + Retrieve a list of calls from Twilio account. + + Parameters: + - limit: Maximum number of calls to retrieve (default 20, max 1000) + - status: Filter by call status (queued, ringing, in-progress, completed, busy, failed, no-answer, canceled) + - from_: Filter by caller phone number + - to: Filter by called phone number + - start_time_after: ISO date string to filter calls started after this time + - start_time_before: ISO date string to filter calls started before this time + + Returns: + - Dictionary with list of calls and metadata + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + if limit > 1000: + limit = 1000 + + logger.info(f"Retrieving up to {limit} calls") + + filter_params = {} + if status: + valid_statuses = ['queued', 'ringing', 'in-progress', 'completed', 'busy', 'failed', 'no-answer', 'canceled'] + if status.lower() not in valid_statuses: + raise ValueError(f"Invalid status. Must be one of: {', '.join(valid_statuses)}") + filter_params['status'] = status.lower() + + if from_: + filter_params['from_'] = validate_phone_number(from_) + if to: + filter_params['to'] = validate_phone_number(to) + if start_time_after: + filter_params['start_time_after'] = start_time_after + if start_time_before: + filter_params['start_time_before'] = start_time_before + + calls = client.calls.list(limit=limit, **filter_params) + + call_list = [] + for call in calls: + call_list.append({ + 'sid': call.sid, + 'status': call.status, + 'direction': call.direction, # Handle reserved keyword + 'to': call.to, + 'duration': call.duration, + 'price': call.price, + 'price_unit': call.price_unit, + 'forwarded_from': call.forwarded_from, + 'caller_name': call.caller_name, + 'date_created': call.date_created.isoformat() if call.date_created else None, + 'start_time': call.start_time.isoformat() if call.start_time else None, + 'end_time': call.end_time.isoformat() if call.end_time else None, + }) + + logger.info(f"Retrieved {len(call_list)} calls") + + return { + 'calls': call_list, + 'count': len(call_list), + 'filters_applied': filter_params + } + except Exception as e: + logger.error(f"Error retrieving calls: {e}") + raise e + +async def twilio_get_call_by_sid(call_sid: str) -> dict: + """ + Retrieve a specific call by its SID. + + Parameters: + - call_sid: Unique identifier for the call + + Returns: + - Dictionary containing call details + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + logger.info(f"Retrieving call with SID: {call_sid}") + + call = client.calls(call_sid).fetch() + + return { + 'sid': call.sid, + 'status': call.status, + 'direction': call.direction, + 'to': call.to, + 'duration': call.duration, + 'price': call.price, + 'price_unit': call.price_unit, + 'forwarded_from': call.forwarded_from, + 'caller_name': call.caller_name, + 'parent_call_sid': call.parent_call_sid, + 'answered_by': call.answered_by, + 'date_created': call.date_created.isoformat() if call.date_created else None, + 'date_updated': call.date_updated.isoformat() if call.date_updated else None, + 'start_time': call.start_time.isoformat() if call.start_time else None, + 'end_time': call.end_time.isoformat() if call.end_time else None, + } + except Exception as e: + logger.error(f"Error retrieving call {call_sid}: {e}") + raise e + +async def twilio_get_recordings( + limit: int = 20, + call_sid: Optional[str] = None, + date_created_after: Optional[str] = None, + date_created_before: Optional[str] = None +) -> dict: + """ + Retrieve call recordings from Twilio account. + + Parameters: + - limit: Maximum number of recordings to retrieve (default 20, max 1000) + - call_sid: Filter recordings by specific call SID + - date_created_after: ISO date string to filter recordings created after this date + - date_created_before: ISO date string to filter recordings created before this date + + Returns: + - Dictionary with list of recordings and metadata + """ + client = get_twilio_client() + if not client: + raise ValueError("Twilio client not available. Please check authentication.") + + try: + if limit > 1000: + limit = 1000 + + logger.info(f"Retrieving up to {limit} recordings") + + filter_params = {} + if call_sid: + filter_params['call_sid'] = call_sid + if date_created_after: + filter_params['date_created_after'] = date_created_after + if date_created_before: + filter_params['date_created_before'] = date_created_before + + recordings = client.recordings.list(limit=limit, **filter_params) + + recording_list = [] + for recording in recordings: + recording_list.append({ + 'sid': recording.sid, + 'call_sid': recording.call_sid, + 'status': recording.status, + 'duration': recording.duration, + 'channels': recording.channels, + 'source': recording.source, + 'price': recording.price, + 'price_unit': recording.price_unit, + 'uri': recording.uri, + 'date_created': recording.date_created.isoformat() if recording.date_created else None, + 'date_updated': recording.date_updated.isoformat() if recording.date_updated else None, + }) + + logger.info(f"Retrieved {len(recording_list)} recordings") + + return { + 'recordings': recording_list, + 'count': len(recording_list), + 'filters_applied': filter_params + } + except Exception as e: + logger.error(f"Error retrieving recordings: {e}") + raise e \ No newline at end of file diff --git a/mcp_servers/whatsapp/.eslintrc.json b/mcp_servers/whatsapp/.eslintrc.json new file mode 100644 index 00000000..52c9c608 --- /dev/null +++ b/mcp_servers/whatsapp/.eslintrc.json @@ -0,0 +1,14 @@ +{ + "env": { + "node": true, + "es2022": true + }, + "extends": [ + "eslint:recommended", + "@typescript-eslint/recommended", + "prettier" + ], + "parser": "@typescript-eslint/parser", + "plugins": ["@typescript-eslint"], + "rules": {} +} \ No newline at end of file diff --git a/mcp_servers/whatsapp/Dockerfile b/mcp_servers/whatsapp/Dockerfile new file mode 100644 index 00000000..88e08fc2 --- /dev/null +++ b/mcp_servers/whatsapp/Dockerfile @@ -0,0 +1,32 @@ +FROM node:22.12-alpine AS builder + +# Set the working directory inside the container +WORKDIR /app + +# Copy package.json and package-lock.json to install dependencies +COPY mcp_servers/whatsapp/package.json mcp_servers/whatsapp/package-lock.json ./ + +# Install dependencies (ignoring scripts to prevent running the prepare script) +RUN npm install --ignore-scripts + +# Copy the rest of the application source code +COPY mcp_servers/whatsapp . + +# Build the application using TypeScript +RUN npm run build + +FROM node:22-alpine AS release + +COPY --from=builder /app/dist /app/dist +COPY --from=builder /app/package.json /app/package.json +COPY --from=builder /app/package-lock.json /app/package-lock.json + +ENV NODE_ENV=production + +EXPOSE 5000 + +WORKDIR /app + +RUN npm ci --ignore-scripts --omit-dev + +ENTRYPOINT ["node", "dist/index.js"] \ No newline at end of file diff --git a/mcp_servers/whatsapp/README.md b/mcp_servers/whatsapp/README.md new file mode 100644 index 00000000..542c8c15 --- /dev/null +++ b/mcp_servers/whatsapp/README.md @@ -0,0 +1,73 @@ +# WhatsApp MCP Server + +A Model Context Protocol (MCP) server for WhatsApp Business integration. Send messages and manage WhatsApp Business conversations using WhatsApp's Business API. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to WhatsApp with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("WHATSAPP", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/whatsapp-mcp-server:latest + + +# Run WhatsApp MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/whatsapp-mcp-server:latest +``` + +**Access Token Setup:** Get your WhatsApp Business API access token from the [Meta for Developers](https://developers.facebook.com/) platform. + +## šŸ› ļø Available Tools + +- **Message Sending**: Send text messages and media to WhatsApp contacts +- **Template Messages**: Use pre-approved message templates +- **Media Handling**: Send images, documents, and other media types +- **Business Profile**: Manage WhatsApp Business profile information +- **Webhook Management**: Handle incoming messages and events + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/whatsapp/index.ts b/mcp_servers/whatsapp/index.ts new file mode 100644 index 00000000..3c659818 --- /dev/null +++ b/mcp_servers/whatsapp/index.ts @@ -0,0 +1,322 @@ +import express, { Request, Response } from 'express'; +import { Server } from '@modelcontextprotocol/sdk/server/index.js'; +import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js'; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; +import { + Tool, + CallToolRequestSchema, + ListToolsRequestSchema, +} from '@modelcontextprotocol/sdk/types.js'; +import { AsyncLocalStorage } from 'async_hooks'; +import dotenv from 'dotenv'; + +dotenv.config(); + +// WhatsApp Business API configuration +const WHATSAPP_API_URL = 'https://graph.facebook.com/'; +const API_VERSION = 'v23.0'; + +// Create AsyncLocalStorage for request context +const asyncLocalStorage = new AsyncLocalStorage<{ + accessToken: string; +}>(); + +// WhatsApp Business API Client +class WhatsAppClient { + private accessToken: string; + private phoneNumberId: string; + private baseUrl: string; + + constructor(accessToken: string, phoneNumberId: string, baseUrl: string = WHATSAPP_API_URL) { + this.accessToken = accessToken; + this.phoneNumberId = phoneNumberId; + this.baseUrl = baseUrl; + } + + private async makeRequest(endpoint: string, data: any): Promise { + const url = `${this.baseUrl}/${API_VERSION}/${this.phoneNumberId}${endpoint}`; + const headers = { + 'Authorization': `Bearer ${this.accessToken}`, + 'Content-Type': 'application/json', + }; + + const response = await fetch(url, { + method: 'POST', + headers, + body: JSON.stringify(data), + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error(`WhatsApp API error: ${response.status} ${response.statusText} - ${errorText}`); + } + + return response.json(); + } + + async sendTextMessage(data: { + to: string; + text: string; + preview_url?: boolean; + }): Promise { + const messageData = { + messaging_product: "whatsapp", + recipient_type: "individual", + to: data.to, + type: "text", + text: { + preview_url: data.preview_url || false, + body: data.text + } + }; + + return this.makeRequest('/messages', messageData); + } +} + +function getAccessToken() { + const store = asyncLocalStorage.getStore(); + if (!store) { + throw new Error('Access token not found in AsyncLocalStorage'); + } + return store.accessToken; +} + +// Tool definitions +const SEND_TEXT_MESSAGE_TOOL: Tool = { + name: 'whatsapp_send_text', + description: 'Send a text message to a WhatsApp user using the WhatsApp Business API.', + inputSchema: { + type: 'object', + properties: { + phone_number_id: { + type: 'string', + description: 'WhatsApp Business phone number ID (e.g., 123456789012345)', + }, + to: { + type: 'string', + description: 'WhatsApp user phone number in international format (e.g., +16505551234)', + }, + text: { + type: 'string', + description: 'Body text of the message. URLs are automatically hyperlinked. Maximum 1024 characters.', + }, + preview_url: { + type: 'boolean', + description: 'Set to true to have the WhatsApp client attempt to render a link preview of any URL in the body text string.', + default: false, + }, + }, + required: ['phone_number_id', 'to', 'text'], + }, + annotations: { + category: 'WHATSAPP_MESSAGE', + }, +}; + +function safeLog(level: 'error' | 'debug' | 'info' | 'notice' | 'warning' | 'critical' | 'alert' | 'emergency', data: any): void { + try { + console.log(`[${level.toUpperCase()}]`, typeof data === 'string' ? data : JSON.stringify(data, null, 2)); + } catch (error) { + console.log(`[${level.toUpperCase()}] [LOG_ERROR]`, data); + } +} + +const getWhatsAppMcpServer = () => { + const server = new Server( + { + name: 'whatsapp-mcp-server', + version: '1.0.0', + }, + { + capabilities: { + tools: {}, + }, + } + ); + + server.setRequestHandler(ListToolsRequestSchema, async () => { + return { + tools: [ + SEND_TEXT_MESSAGE_TOOL, + ], + }; + }); + + server.setRequestHandler(CallToolRequestSchema, async (request) => { + const { name, arguments: args } = request.params; + + try { + switch (name) { + case 'whatsapp_send_text': { + const accessToken = getAccessToken(); + const phoneNumberId = (args as any)?.phone_number_id; + const client = new WhatsAppClient(accessToken, phoneNumberId); + const result = await client.sendTextMessage({ + to: (args as any)?.to, + text: (args as any)?.text, + preview_url: (args as any)?.preview_url, + }); + + return { + content: [ + { + type: 'text', + text: JSON.stringify(result, null, 2), + }, + ], + }; + } + + default: + throw new Error(`Unknown tool: ${name}`); + } + } catch (error: any) { + safeLog('error', `Tool ${name} failed: ${error.message}`); + return { + content: [ + { + type: 'text', + text: `Error: ${error.message}`, + }, + ], + isError: true, + }; + } + }); + + return server; +}; + +function extractApiKey(req: Request): string { + let authData = process.env.API_KEY; + + if (authData) { + return authData; + } + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: WhatsApp API key is missing. Provide it via API_KEY env var or x-auth-data header with token field.'); + return ''; + } + + const authDataJson = JSON.parse(authData); + return authDataJson.token ?? authDataJson.api_key ?? ''; +} + +const app = express(); + +//============================================================================= +// STREAMABLE HTTP TRANSPORT (PROTOCOL VERSION 2025-03-26) +//============================================================================= + +app.post('/mcp', async (req: Request, res: Response) => { + const accessToken = extractApiKey(req); + + const server = getWhatsAppMcpServer(); + try { + const transport: StreamableHTTPServerTransport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + await server.connect(transport); + asyncLocalStorage.run({ accessToken }, async () => { + await transport.handleRequest(req, res, req.body); + }); + res.on('close', () => { + console.log('Request closed'); + transport.close(); + server.close(); + }); + } catch (error) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', + }, + id: null, + }); + } + } +}); + +app.get('/mcp', async (req: Request, res: Response) => { + console.log('Received GET MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +app.delete('/mcp', async (req: Request, res: Response) => { + console.log('Received DELETE MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +//============================================================================= +// DEPRECATED HTTP+SSE TRANSPORT (PROTOCOL VERSION 2024-11-05) +//============================================================================= + +// to support multiple simultaneous connections we have a lookup object from +// sessionId to transport +const transports = new Map(); + +app.get("/sse", async (req, res) => { + const transport = new SSEServerTransport(`/messages`, res); + + // Set up cleanup when connection closes + res.on('close', async () => { + console.log(`SSE connection closed for transport: ${transport.sessionId}`); + try { + transports.delete(transport.sessionId); + } finally { + } + }); + + transports.set(transport.sessionId, transport); + + const server = getWhatsAppMcpServer(); + await server.connect(transport); + + console.log(`SSE connection established with transport: ${transport.sessionId}`); +}); + +app.post("/messages", async (req, res) => { + const sessionId = req.query.sessionId as string; + const transport = transports.get(sessionId); + if (transport) { + const accessToken = extractApiKey(req); + + asyncLocalStorage.run({ accessToken }, async () => { + await transport.handlePostMessage(req, res); + }); + } else { + console.error(`Transport not found for session ID: ${sessionId}`); + res.status(404).send({ error: "Transport not found" }); + } +}); + +app.listen(5000, () => { + console.log('WhatsApp MCP server running on port 5000'); +}); \ No newline at end of file diff --git a/mcp_servers/whatsapp/package-lock.json b/mcp_servers/whatsapp/package-lock.json new file mode 100644 index 00000000..2e1295c4 --- /dev/null +++ b/mcp_servers/whatsapp/package-lock.json @@ -0,0 +1,6308 @@ +{ + "name": "@klavis-ai/mcp-server-whatsapp", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@klavis-ai/mcp-server-whatsapp", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.12.1", + "dotenv": "^16.4.7", + "express": "^5.1.0", + "p-queue": "^8.0.1", + "shx": "^0.3.4", + "ws": "^8.18.1" + }, + "bin": { + "whatsapp-mcp": "dist/index.js" + }, + "devDependencies": { + "@jest/globals": "^29.7.0", + "@types/express": "^5.0.1", + "@types/jest": "^29.5.14", + "@types/node": "^20.10.5", + "@typescript-eslint/eslint-plugin": "^7.0.0", + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "jest": "^29.7.0", + "jest-mock-extended": "^4.0.0-beta1", + "prettier": "^3.1.1", + "ts-jest": "^29.1.1", + "typescript": "^5.3.3" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.27.7.tgz", + "integrity": "sha512-xgu/ySj2mTiUFmdE9yCMfBxLp4DHd5DwmbbD05YAuICfodYT3VvRxbrh81LGQ/8UpSdtMdfKMn3KouYDX59DGQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.27.7.tgz", + "integrity": "sha512-BU2f9tlKQ5CAthiMIgpzAh4eDTLWo1mqi9jqE2OxMG0E/OM199VJt2q8BztTxpnSW0i1ymdwLXRJnYzvDM5r2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.27.5", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-module-transforms": "^7.27.3", + "@babel/helpers": "^7.27.6", + "@babel/parser": "^7.27.7", + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.27.7", + "@babel/types": "^7.27.7", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/generator": { + "version": "7.27.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.27.5.tgz", + "integrity": "sha512-ZGhA37l0e/g2s1Cnzdix0O3aLYm66eF8aufiVteOgnwxgnRP8GoyMj7VWsgWnQbVKXyge7hqrFh2K2TQM6t1Hw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.27.5", + "@babel/types": "^7.27.3", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", + "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.27.2", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", + "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.27.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz", + "integrity": "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", + "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.27.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.6.tgz", + "integrity": "sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.2", + "@babel/types": "^7.27.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.7.tgz", + "integrity": "sha512-qnzXzDXdr/po3bOTbTIQZ7+TxNKxpkN5IifVLXS+r7qwynkZfPyjZfE7hCXbo7IoO9TNcSyibgONsf2HauUd3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.7" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.27.1.tgz", + "integrity": "sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", + "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", + "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.7.tgz", + "integrity": "sha512-X6ZlfR/O/s5EQ/SnUSLzr+6kGnkg8HXGMzpgsMsrJVcfDtH1vIp6ctCN4eZ1LS5c0+te5Cb6Y514fASjMRJ1nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.27.5", + "@babel/parser": "^7.27.7", + "@babel/template": "^7.27.2", + "@babel/types": "^7.27.7", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse/node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/types": { + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.7.tgz", + "integrity": "sha512-8OLQgDScAOHXnAz2cV+RfzzNMipuLVBz2biuAJFMV9bfkNf393je3VM8CLkjQodW5+iWsSJdSgSWT6rsZoXHPw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz", + "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.3", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", + "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/core": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", + "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/reporters": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^29.7.0", + "jest-config": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-resolve-dependencies": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "jest-watcher": "^29.7.0", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/environment": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", + "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^29.7.0", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", + "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-get-type": "^29.6.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", + "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@sinonjs/fake-timers": "^10.0.2", + "@types/node": "*", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", + "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/types": "^29.6.3", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", + "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^6.0.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "slash": "^3.0.0", + "string-length": "^4.0.1", + "strip-ansi": "^6.0.0", + "v8-to-istanbul": "^9.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/source-map": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", + "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.18", + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", + "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-sequencer": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", + "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", + "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", + "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", + "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.13.2.tgz", + "integrity": "sha512-Vx7qOcmoKkR3qhaQ9qf3GxiVKCEu+zfJddHv6x3dY/9P6+uIwJnmuAur5aB+4FDXf41rRrDnOEGkviX5oYZ67w==", + "license": "MIT", + "dependencies": { + "ajv": "^6.12.6", + "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "express": "^5.0.1", + "express-rate-limit": "^7.5.0", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.23.8", + "zod-to-json-schema": "^3.24.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", + "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", + "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^3.0.0" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.7", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.7.tgz", + "integrity": "sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.20.7" + } + }, + "node_modules/@types/body-parser": { + "version": "1.19.6", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz", + "integrity": "sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/express": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.3.tgz", + "integrity": "sha512-wGA0NX93b19/dZC1J18tKWVIYWyyF2ZjT9vin/NRu0qzzvfVzWjs04iq2rQ3H65vCTQYlRqs3YHfY7zjdV+9Kw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^5.0.0", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.6.tgz", + "integrity": "sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", + "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.5.tgz", + "integrity": "sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "29.5.14", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", + "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "20.19.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.1.tgz", + "integrity": "sha512-jJD50LtlD2dodAEO653i3YF04NWak6jN3ky+Ri3Em3mGR39/glWiboM/IePaRbgwSfqM1TpGXfAg8ohn/4dTgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@types/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/send": { + "version": "0.17.5", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.5.tgz", + "integrity": "sha512-z6F2D3cOStZvuk2SaP6YrwkNO65iTZcwA2ZkSABegdkAh/lf+Aa/YQndZVfmEXT5vgAp6zv06VQ3ejSVjAny4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.8.tgz", + "integrity": "sha512-roei0UY3LhpOJvjbIP6ZZFngyLKl5dskOtDhxY5THRSpO+ZI+nzJ+m5yUMzGrp89YRa7lvknKkMYjqQFGwA7Sg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/@types/stack-utils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/yargs": { + "version": "17.0.33", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.18.0.tgz", + "integrity": "sha512-94EQTWZ40mzBc42ATNIBimBEDltSJ9RQHCC8vc/PDbxi4k8dVwUAv4o98dk50M1zB+JGFxp43FP7f8+FP8R6Sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/type-utils": "7.18.0", + "@typescript-eslint/utils": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "graphemer": "^1.4.0", + "ignore": "^5.3.1", + "natural-compare": "^1.4.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.18.0.tgz", + "integrity": "sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/typescript-estree": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.18.0.tgz", + "integrity": "sha512-jjhdIE/FPF2B7Z1uzc6i3oWKbGcHb87Qw7AWj6jmEqNOfDFbJWtjt/XfwCpvNkpGWlcJaog5vTR+VV8+w9JflA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.18.0.tgz", + "integrity": "sha512-XL0FJXuCLaDuX2sYqZUUSOJ2sG5/i1AAze+axqmLnSkNEVMVYLF+cbwlB2w8D1tinFuSikHmFta+P+HOofrLeA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/typescript-estree": "7.18.0", + "@typescript-eslint/utils": "7.18.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.18.0.tgz", + "integrity": "sha512-iZqi+Ds1y4EDYUtlOOC+aUmxnE9xS/yCigkjA7XpTKV6nCBd3Hp/PRGGmdwnfkV2ThMyYldP1wRpm/id99spTQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.18.0.tgz", + "integrity": "sha512-aP1v/BSPnnyhMHts8cf1qQ6Q1IFwwRvAQGRvBFkWlo3/lH29OXA3Pts+c10nxRxIBrDnoMqzhgdwVe5f2D6OzA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.18.0.tgz", + "integrity": "sha512-kK0/rNa2j74XuHVcoCZxdFBMF+aq/vH83CXAOHieC+2Gis4mF8jJXT5eAfyD3K0sAxtPuwxaIOIOvhwzVDt/kw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/typescript-estree": "7.18.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.18.0.tgz", + "integrity": "sha512-cDF0/Gf81QpY3xYyJKDV14Zwdmid5+uuENhjH2EqFaF0ni+yAyq/LzMaIJdhNJXZI7uLzwIlA+V7oWoyn6Curg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true, + "license": "ISC" + }, + "node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-escapes/node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", + "dev": true, + "license": "MIT" + }, + "node_modules/babel-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", + "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/transform": "^29.7.0", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^29.6.3", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", + "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.1.14", + "@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz", + "integrity": "sha512-ldYss8SbBlWva1bs28q78Ju5Zq1F+8BrqBZZ0VFhLBvhh6lCpC2o3gDJi/5DRLs9FgYZCnmPYIVFU4lRXCkyUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/babel-preset-jest": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", + "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-jest-hoist": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, + "node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.25.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz", + "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "caniuse-lite": "^1.0.30001726", + "electron-to-chromium": "^1.5.173", + "node-releases": "^2.0.19", + "update-browserslist-db": "^1.1.3" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-json-stable-stringify": "2.x" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001726", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001726.tgz", + "integrity": "sha512-VQAUIUzBiZ/UnlM28fSp2CRF3ivUn1BWEvxMcVTNwpw91Py1pGbPIyIKtd+tzct9C3ouceCVdGAXxZOpZAsgdw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", + "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz", + "integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" + }, + "node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/create-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", + "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "prompts": "^2.0.1" + }, + "bin": { + "create-jest": "bin/create-jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/dedent": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.6.0.tgz", + "integrity": "sha512-F1Z+5UCFpmQUzJa11agbyPVMbpgT/qA3/SKyJ1jyBgm7dUcUEa8v9JwDkerSQXfakBwFljIxhOJqGkjUwZ9FSA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dotenv": { + "version": "16.6.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.0.tgz", + "integrity": "sha512-Omf1L8paOy2VJhILjyhrhqwLIdstqm1BvcDPKg4NGAlkwEu9ODyrFbvk8UymUOMCT+HXo31jg1lArIrVAAhuGA==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/ejs": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", + "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.176", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.176.tgz", + "integrity": "sha512-2nDK9orkm7M9ZZkjO3PjbEd3VUulQLyg5T9O3enJdFvUg46Hzd4DUvTvAuEgbdHYXyFsiG4A5sO9IzToMH1cDg==", + "dev": true, + "license": "ISC" + }, + "node_modules/emittery": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", + "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-config-prettier": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", + "dev": true, + "license": "MIT", + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventemitter3": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", + "license": "MIT" + }, + "node_modules/eventsource": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", + "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", + "license": "MIT", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.3.tgz", + "integrity": "sha512-nVpZkTMM9rF6AQ9gPJpFsNAMt48wIzB5TQgiTLdHiuO8XEDhUgZEhqKlZWXbIzo9VmJ/HvysHqEaVeD5v9TPvA==", + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/expect-utils": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/express": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "license": "MIT", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.0", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.1.tgz", + "integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==", + "license": "MIT", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": ">= 4.11" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/filelist": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "minimatch": "^5.0.1" + } + }, + "node_modules/filelist/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true, + "license": "MIT" + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true, + "license": "MIT" + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-errors/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-local": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", + "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/interpret": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", + "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.23.9", + "@babel/parser": "^7.23.9", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jake": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz", + "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "async": "^3.2.3", + "chalk": "^4.0.2", + "filelist": "^1.0.4", + "minimatch": "^3.1.2" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jake/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/jake/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", + "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/types": "^29.6.3", + "import-local": "^3.0.2", + "jest-cli": "^29.7.0" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", + "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "execa": "^5.0.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-circus": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", + "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^1.0.0", + "is-generator-fn": "^2.0.0", + "jest-each": "^29.7.0", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0", + "pretty-format": "^29.7.0", + "pure-rand": "^6.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-cli": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", + "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "create-jest": "^29.7.0", + "exit": "^0.1.2", + "import-local": "^3.0.2", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "yargs": "^17.3.1" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-config": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", + "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/test-sequencer": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-jest": "^29.7.0", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-circus": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-diff": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", + "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^29.6.3", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-docblock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", + "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "detect-newline": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-each": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", + "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "jest-util": "^29.7.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-environment-node": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", + "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", + "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-haste-map": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", + "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/graceful-fs": "^4.1.3", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + "node_modules/jest-leak-detector": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", + "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", + "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", + "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^29.6.3", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", + "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock-extended": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jest-mock-extended/-/jest-mock-extended-4.0.0.tgz", + "integrity": "sha512-7BZpfuvLam+/HC+NxifIi9b+5VXj/utUDMPUqrDJehGWVuXPtLS9Jqlob2mJLrI/pg2k1S8DMfKDvEB88QNjaQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ts-essentials": "^10.0.2" + }, + "peerDependencies": { + "@jest/globals": "^28.0.0 || ^29.0.0 || ^30.0.0", + "jest": "^24.0.0 || ^25.0.0 || ^26.0.0 || ^27.0.0 || ^28.0.0 || ^29.0.0 || ^30.0.0", + "typescript": "^3.0.0 || ^4.0.0 || ^5.0.0" + } + }, + "node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", + "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", + "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "resolve": "^1.20.0", + "resolve.exports": "^2.0.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve-dependencies": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", + "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-regex-util": "^29.6.3", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runner": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", + "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/environment": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-leak-detector": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-resolve": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-util": "^29.7.0", + "jest-watcher": "^29.7.0", + "jest-worker": "^29.7.0", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runtime": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", + "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/globals": "^29.7.0", + "@jest/source-map": "^29.6.3", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", + "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-jsx": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/types": "^7.3.3", + "@jest/expect-utils": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "natural-compare": "^1.4.0", + "pretty-format": "^29.7.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", + "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", + "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "leven": "^3.1.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-watcher": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", + "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "jest-util": "^29.7.0", + "string-length": "^4.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", + "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "jest-util": "^29.7.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "tmpl": "1.0.5" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-releases": { + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", + "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-queue": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-8.1.0.tgz", + "integrity": "sha512-mxLDbbGIBEXTJL0zEx8JIylaj3xQ7Z/7eEVjcF9fJX4DBiH9oqe+oahYnlKKxm0Ci9TlWTyhSHgygxMxjIB2jw==", + "license": "MIT", + "dependencies": { + "eventemitter3": "^5.0.1", + "p-timeout": "^6.1.2" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-timeout": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-6.1.4.tgz", + "integrity": "sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg==", + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "license": "MIT" + }, + "node_modules/path-to-regexp": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "license": "MIT", + "engines": { + "node": ">=16" + } + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkce-challenge": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", + "license": "MIT", + "engines": { + "node": ">=16.20.0" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.1.tgz", + "integrity": "sha512-5xGWRa90Sp2+x1dQtNpIpeOQpTDBs9cZDmA/qs2vDNN2i18PdapqY7CmBeyLlMuGqXJRIOPaCaVZTLNQRWUH/A==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/pure-rand": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", + "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, + "node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", + "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.6.3", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true, + "license": "MIT" + }, + "node_modules/rechoir": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", + "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==", + "dependencies": { + "resolve": "^1.1.6" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-cwd/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve.exports": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz", + "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + "path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/serve-static": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shelljs": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", + "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", + "license": "BSD-3-Clause", + "dependencies": { + "glob": "^7.0.0", + "interpret": "^1.0.0", + "rechoir": "^0.6.2" + }, + "bin": { + "shjs": "bin/shjs" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/shx": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/shx/-/shx-0.3.4.tgz", + "integrity": "sha512-N6A9MLVqjxZYcVn8hLmtneQWIJtp8IKzMP4eMnx+nqkvXoqinUPCbUFLp2UcWTEIUONhlk0ewxr/jaVGlc+J+g==", + "license": "MIT", + "dependencies": { + "minimist": "^1.2.3", + "shelljs": "^0.8.5" + }, + "bin": { + "shx": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true, + "license": "MIT" + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/stack-utils/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/test-exclude/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/test-exclude/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true, + "license": "MIT" + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/ts-api-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.4.3.tgz", + "integrity": "sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, + "node_modules/ts-essentials": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-10.1.1.tgz", + "integrity": "sha512-4aTB7KLHKmUvkjNj8V+EdnmuVTiECzn3K+zIbRthumvHu+j44x3w63xpfs0JL3NGIzGXqoQ7AV591xHO+XrOTw==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "typescript": ">=4.5.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/ts-jest": { + "version": "29.4.0", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.4.0.tgz", + "integrity": "sha512-d423TJMnJGu80/eSgfQ5w/R+0zFJvdtTxwtF9KzFFunOpSeD+79lHJQIiAhluJoyGRbvj9NZJsl9WjCUo0ND7Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "bs-logger": "^0.2.6", + "ejs": "^3.1.10", + "fast-json-stable-stringify": "^2.1.0", + "json5": "^2.2.3", + "lodash.memoize": "^4.1.2", + "make-error": "^1.3.6", + "semver": "^7.7.2", + "type-fest": "^4.41.0", + "yargs-parser": "^21.1.1" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0" + }, + "peerDependencies": { + "@babel/core": ">=7.0.0-beta.0 <8", + "@jest/transform": "^29.0.0 || ^30.0.0", + "@jest/types": "^29.0.0 || ^30.0.0", + "babel-jest": "^29.0.0 || ^30.0.0", + "jest": "^29.0.0 || ^30.0.0", + "jest-util": "^29.0.0 || ^30.0.0", + "typescript": ">=4.3 <6" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@jest/transform": { + "optional": true + }, + "@jest/types": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "jest-util": { + "optional": true + } + } + }, + "node_modules/ts-jest/node_modules/type-fest": { + "version": "4.41.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", + "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", + "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/v8-to-istanbul": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", + "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", + "dev": true, + "license": "ISC", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "makeerror": "1.0.12" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, + "node_modules/write-file-atomic": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/ws": { + "version": "8.18.2", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.2.tgz", + "integrity": "sha512-DMricUmwGZUVr++AEAe2uiVM7UoO9MAVZMDu05UQOaUII0lp+zOzLLU4Xqh/JvTqklB1T4uELaaPBKyjE1r4fQ==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zod": { + "version": "3.25.67", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.67.tgz", + "integrity": "sha512-idA2YXwpCdqUSKRCACDE6ItZD9TZzy3OZMtpfLoh6oPR47lipysRrJfjzMqFxQ3uJuUPyUeWe1r9vLH33xO/Qw==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.24.6", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz", + "integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.24.1" + } + } + } +} diff --git a/mcp_servers/whatsapp/package.json b/mcp_servers/whatsapp/package.json new file mode 100644 index 00000000..cf600df8 --- /dev/null +++ b/mcp_servers/whatsapp/package.json @@ -0,0 +1,60 @@ +{ + "name": "@klavis-ai/mcp-server-whatsapp", + "version": "1.0.0", + "description": "MCP server for WhatsApp Business API integration.", + "type": "module", + "bin": { + "whatsapp-mcp": "dist/index.js" + }, + "files": [ + "dist" + ], + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "tsc && node -e \"require('fs').chmodSync('dist/index.js', '755')\"", + "test": "node --experimental-vm-modules node_modules/jest/bin/jest.js", + "start": "node dist/index.js", + "lint": "eslint src/**/*.ts", + "lint:fix": "eslint src/**/*.ts --fix", + "format": "prettier --write .", + "prepare": "npm run build", + "publish": "npm run build && npm publish" + }, + "license": "MIT", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.12.1", + "dotenv": "^16.4.7", + "p-queue": "^8.0.1", + "shx": "^0.3.4", + "ws": "^8.18.1", + "express": "^5.1.0" + }, + "devDependencies": { + "@jest/globals": "^29.7.0", + "@types/jest": "^29.5.14", + "@types/node": "^20.10.5", + "@types/express": "^5.0.1", + "@typescript-eslint/eslint-plugin": "^7.0.0", + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "jest": "^29.7.0", + "jest-mock-extended": "^4.0.0-beta1", + "prettier": "^3.1.1", + "ts-jest": "^29.1.1", + "typescript": "^5.3.3" + }, + "engines": { + "node": ">=18.0.0" + }, + "keywords": [ + "mcp", + "whatsapp", + "messaging", + "business-api", + "api-integration" + ], + "author": "Klavis AI" +} \ No newline at end of file diff --git a/mcp_servers/whatsapp/tsconfig.json b/mcp_servers/whatsapp/tsconfig.json new file mode 100644 index 00000000..53cea737 --- /dev/null +++ b/mcp_servers/whatsapp/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "outDir": "./dist", + "rootDir": ".", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true + }, + "include": ["./**/*.ts"], + "exclude": [ + "node_modules", + "dist", + "tests" + ] +} \ No newline at end of file diff --git a/mcp_servers/wordpress/.eslintrc.json b/mcp_servers/wordpress/.eslintrc.json new file mode 100644 index 00000000..6a1b1376 --- /dev/null +++ b/mcp_servers/wordpress/.eslintrc.json @@ -0,0 +1,11 @@ +{ + "root": false, + "extends": [ + "../.eslintrc.js" + ], + "parserOptions": { + "tsconfigRootDir": ".", + "project": "./tsconfig.json" + }, + "rules": {} +} \ No newline at end of file diff --git a/mcp_servers/wordpress/Dockerfile b/mcp_servers/wordpress/Dockerfile new file mode 100755 index 00000000..76a0dbf4 --- /dev/null +++ b/mcp_servers/wordpress/Dockerfile @@ -0,0 +1,32 @@ +FROM node:22.12-alpine AS builder + +# Set the working directory inside the container +WORKDIR /app + +# Copy package.json and package-lock.json to install dependencies +COPY mcp_servers/wordpress/package.json mcp_servers/wordpress/package-lock.json ./ + +# Install dependencies (ignoring scripts to prevent running the prepare script) +RUN npm install --ignore-scripts + +# Copy the rest of the application source code +COPY mcp_servers/wordpress . + +# Build the application using TypeScript +RUN npm run build + +FROM node:22-alpine AS release + +COPY --from=builder /app/dist /app/dist +COPY --from=builder /app/package.json /app/package.json +COPY --from=builder /app/package-lock.json /app/package-lock.json + +ENV NODE_ENV=production + +EXPOSE 5000 + +WORKDIR /app + +RUN npm ci --ignore-scripts --omit-dev + +ENTRYPOINT ["node", "dist/index.js"] \ No newline at end of file diff --git a/mcp_servers/wordpress/README.md b/mcp_servers/wordpress/README.md new file mode 100644 index 00000000..153f8458 --- /dev/null +++ b/mcp_servers/wordpress/README.md @@ -0,0 +1,77 @@ +# WordPress MCP Server + +A Model Context Protocol (MCP) server for WordPress integration. Manage posts, pages, comments, and media using WordPress REST API with OAuth support. + +## šŸš€ Quick Start - Run in 30 Seconds + +### 🌐 Using Hosted Service (Recommended for Production) + +Get instant access to WordPress with our managed infrastructure - **no setup required**: + +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** + +```bash +pip install klavis +# or +npm install klavis +``` + +```python +from klavis import Klavis + +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("WORDPRESS", "user123") +``` + +### 🐳 Using Docker (For Self-Hosting) + +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/wordpress-mcp-server:latest + + +# Run WordPress MCP Server with OAuth Support through Klavis AI +docker run -p 5000:5000 -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/wordpress-mcp-server:latest + +# Run WordPress MCP Server (no OAuth support) +docker run -p 5000:5000 -e AUTH_DATA='{"access_token":"your_wordpress_api_token_here"}' \ + ghcr.io/klavis-ai/wordpress-mcp-server:latest +``` + +**OAuth Setup:** WordPress requires OAuth authentication. Use `KLAVIS_API_KEY` from your [free API key](https://www.klavis.ai/home/api-keys) to handle the OAuth flow automatically. + +## šŸ› ļø Available Tools + +- **Content Management**: Create, read, update posts and pages +- **Media Operations**: Upload and manage media files +- **Comment Management**: Handle comments and moderation +- **User Management**: Manage WordPress users and roles +- **Site Operations**: Access site settings and configurations + +## šŸ“š Documentation & Support + +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | + +## šŸ¤ Contributing + +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. + +## šŸ“œ License + +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. + +--- + +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/wordpress/index.ts b/mcp_servers/wordpress/index.ts new file mode 100755 index 00000000..08781eb1 --- /dev/null +++ b/mcp_servers/wordpress/index.ts @@ -0,0 +1,430 @@ +#!/usr/bin/env node +import express, { Request, Response } from 'express'; +import { Server } from "@modelcontextprotocol/sdk/server/index.js"; +import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js"; +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; +import { + CallToolRequestSchema, + ListToolsRequestSchema, +} from "@modelcontextprotocol/sdk/types.js"; +import { AsyncLocalStorage } from 'async_hooks'; + +const getWordPressMcpServer = () => { + const server = new Server( + { + name: "klavis-ai/wordpress", + version: "0.1.0", + }, + { + capabilities: { + resources: {}, + tools: {}, + }, + }, + ); + + server.setRequestHandler(ListToolsRequestSchema, async () => { + return { + tools: [ + { + name: "wordpress_create_post", + description: "Create a new WordPress post", + inputSchema: { + type: "object", + properties: { + site: { type: "string", description: "Site identifier (e.g. example.wordpress.com)" }, + title: { type: "string", description: "Post title in html format" }, + content: { type: "string", description: "Post content in html format" }, + status: { type: "string", description: "Post status (draft, publish, private, pending etc.)", default: "publish" } + }, + required: ["site", "title", "content"] + }, + annotations: { category: 'WORDPRESS_POST' }, + }, + { + name: "wordpress_get_posts", + description: "Get a list of WordPress posts", + inputSchema: { + type: "object", + properties: { + site: { type: "string", description: "Site identifier (e.g. example.wordpress.com)" }, + number: { type: "number", description: "Number of posts to retrieve", default: 10 }, + page: { type: "number", description: "Page number", default: 1 } + }, + required: ["site"] + }, + annotations: { category: 'WORDPRESS_POST', readOnlyHint: true }, + }, + { + name: "wordpress_update_post", + description: "Update an existing WordPress post", + inputSchema: { + type: "object", + properties: { + site: { type: "string", description: "Site identifier (e.g. example.wordpress.com)" }, + postId: { type: "number", description: "ID of the post to update" }, + title: { type: "string", description: "Post title in html format" }, + content: { type: "string", description: "Post content in html format" }, + status: { type: "string", description: "Post status (draft, publish, private, pending etc.)" }, + }, + required: ["site", "postId"] + }, + annotations: { category: 'WORDPRESS_POST' }, + }, + { + name: "wordpress_get_top_posts", + description: "Get top WordPress posts for a site", + inputSchema: { + type: "object", + properties: { + site: { type: "string", description: "Site identifier (e.g. example.wordpress.com)" }, + }, + required: ["site"] + }, + annotations: { category: 'WORDPRESS_POST' } + }, + { + name: "wordpress_get_site_info", + description: "Get information about a WordPress site", + inputSchema: { + type: "object", + properties: { + site: { type: "string", description: "Site identifier (e.g. example.wordpress.com)" }, + }, + required: ["site"] + }, + annotations: { category: 'WORDPRESS_SITE' } + }, + { + name: "wordpress_get_site_stats", + description: "Get statistics for a WordPress site", + inputSchema: { + type: "object", + properties: { + site: { type: "string", description: "Site identifier (e.g. example.wordpress.com)" }, + }, + required: ["site"] + }, + annotations: { category: 'WORDPRESS_SITE' } + }, + { + name: "wordpress_get_user_sites", + description: "Get all WordPress sites the authenticated user has access to", + inputSchema: { + type: "object", + properties: {}, + required: [] + }, + annotations: { category: 'WORDPRESS_USER' } + } + ], + }; + }); + + server.setRequestHandler(CallToolRequestSchema, async (request) => { + const params = request.params.arguments || {}; + + try { + switch (request.params.name) { + case 'wordpress_create_post': { + if (!params.site || !params.title || !params.content) { + throw new Error('Site, title, and content are required for creating a post'); + } + + const client = getClient(); + const response = await client.post(`/sites/${params.site}/posts/new`, { + title: params.title, + content: params.content, + status: params.status || 'draft', + }); + const data = await response.json(); + + return { + content: [{ type: "text", text: JSON.stringify(data, null, 2) }], + isError: false, + }; + } + + case 'wordpress_get_posts': { + if (!params.site) { + throw new Error('Site is required for getting posts'); + } + + const client = getClient(); + const number = params.number || 10; + const page = params.page || 1; + const response = await client.get(`/sites/${params.site}/posts/?number=${number}&page=${page}`); + const data = await response.json(); + + return { + content: [{ type: "text", text: JSON.stringify(data, null, 2) }], + isError: false, + }; + } + + case 'wordpress_update_post': { + if (!params.site || !params.postId) { + throw new Error('Site and Post ID are required for updating a post'); + } + + const updateData: Record = {}; + if (params.title) updateData.title = params.title; + if (params.content) updateData.content = params.content; + if (params.status) updateData.status = params.status; + + const client = getClient(); + const response = await client.post(`/sites/${params.site}/posts/${params.postId}`, updateData); + const data = await response.json(); + + return { + content: [{ type: "text", text: JSON.stringify(data, null, 2) }], + isError: false, + }; + } + + case 'wordpress_get_top_posts': { + if (!params.site) { + throw new Error('Site is required for getting top posts'); + } + + const client = getClient(); + const response = await client.get(`/sites/${params.site}/stats/top-posts`); + const data = await response.json(); + + return { + content: [{ type: "text", text: JSON.stringify(data, null, 2) }], + isError: false, + }; + } + + case 'wordpress_get_site_info': { + if (!params.site) { + throw new Error('Site identifier is required'); + } + + const client = getClient(); + const response = await client.get(`/sites/${params.site}`); + const data = await response.json(); + + return { + content: [{ type: "text", text: JSON.stringify(data, null, 2) }], + isError: false, + }; + } + + case 'wordpress_get_site_stats': { + if (!params.site) { + throw new Error('Site identifier is required'); + } + + const client = getClient(); + const response = await client.get(`/sites/${params.site}/stats`); + const data = await response.json(); + + return { + content: [{ type: "text", text: JSON.stringify(data, null, 2) }], + isError: false, + }; + } + + case 'wordpress_get_user_sites': { + const client = getClient(); + const response = await client.get('/me/sites'); + const data = await response.json(); + + return { + content: [{ type: "text", text: JSON.stringify(data, null, 2) }], + isError: false, + }; + } + + default: + throw new Error(`Unknown tool: ${request.params.name}`); + } + } catch (error) { + const err = error as Error; + throw new Error(`WordPress API error: ${err.message}`); + } + }); + + return server; +} + +// Create AsyncLocalStorage for request context +const asyncLocalStorage = new AsyncLocalStorage<{ + auth_token: string; +}>(); + +function getClient() { + const store = asyncLocalStorage.getStore()!; + const auth_token = store.auth_token; + + return { + get: async (path: string) => { + const response = await fetch(`https://public-api.wordpress.com/rest/v1.1${path}`, { + method: 'GET', + headers: { + 'Authorization': `Bearer ${auth_token}`, + 'Content-Type': 'application/json', + }, + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error(`HTTP error ${response.status}: ${errorText}`); + } + + return response; + }, + post: async (path: string, data: any) => { + const response = await fetch(`https://public-api.wordpress.com/rest/v1.1${path}`, { + method: 'POST', + headers: { + 'Authorization': `Bearer ${auth_token}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify(data), + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error(`HTTP error ${response.status}: ${errorText}`); + } + + return response; + } + }; +} + +function extractAccessToken(req: Request): string { + let authData = process.env.AUTH_DATA; + + if (!authData && req.headers['x-auth-data']) { + try { + authData = Buffer.from(req.headers['x-auth-data'] as string, 'base64').toString('utf8'); + } catch (error) { + console.error('Error parsing x-auth-data JSON:', error); + } + } + + if (!authData) { + console.error('Error: WordPress access token is missing. Provide it via AUTH_DATA env var or x-auth-data header with access_token field.'); + return ''; + } + + const authDataJson = JSON.parse(authData); + return authDataJson.access_token ?? ''; +} + +const app = express(); + + +//============================================================================= +// STREAMABLE HTTP TRANSPORT (PROTOCOL VERSION 2025-03-26) +//============================================================================= + +app.post('/mcp', async (req: Request, res: Response) => { + const auth_token = extractAccessToken(req); + + + const server = getWordPressMcpServer(); + try { + const transport: StreamableHTTPServerTransport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + await server.connect(transport); + asyncLocalStorage.run({ auth_token }, async () => { + await transport.handleRequest(req, res, req.body); + }); + res.on('close', () => { + console.log('Request closed'); + transport.close(); + server.close(); + }); + } catch (error) { + console.error('Error handling MCP request:', error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', + }, + id: null, + }); + } + } +}); + +app.get('/mcp', async (req: Request, res: Response) => { + console.log('Received GET MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +app.delete('/mcp', async (req: Request, res: Response) => { + console.log('Received DELETE MCP request'); + res.writeHead(405).end(JSON.stringify({ + jsonrpc: "2.0", + error: { + code: -32000, + message: "Method not allowed." + }, + id: null + })); +}); + +//============================================================================= +// DEPRECATED HTTP+SSE TRANSPORT (PROTOCOL VERSION 2024-11-05) +//============================================================================= + +const transports = new Map(); + +app.get("/sse", async (req, res) => { + const transport = new SSEServerTransport(`/messages`, res); + + // Set up cleanup when connection closes + res.on('close', async () => { + console.log(`SSE connection closed for transport: ${transport.sessionId}`); + try { + transports.delete(transport.sessionId); + } finally { + } + }); + + transports.set(transport.sessionId, transport); + + const server = getWordPressMcpServer(); + await server.connect(transport); + + console.log(`SSE connection established with transport: ${transport.sessionId}`); +}); + +app.post("/messages", async (req, res) => { + const sessionId = req.query.sessionId as string; + + let transport: SSEServerTransport | undefined; + transport = sessionId ? transports.get(sessionId) : undefined; + if (transport) { + const auth_token = extractAccessToken(req); + + asyncLocalStorage.run({ + auth_token + }, async () => { + await transport.handlePostMessage(req, res); + }); + } else { + console.error(`Transport not found for session ID: ${sessionId}`); + res.status(404).send({ error: "Transport not found" }); + } +}); + +app.listen(5000, () => { + console.log('server running on port 5000'); +}); diff --git a/mcp_servers/wordpress/package-lock.json b/mcp_servers/wordpress/package-lock.json new file mode 100644 index 00000000..491777b8 --- /dev/null +++ b/mcp_servers/wordpress/package-lock.json @@ -0,0 +1,1779 @@ +{ + "name": "@klavis-ai/mcp-server-wordpress", + "version": "0.6.2", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@klavis-ai/mcp-server-wordpress", + "version": "0.6.2", + "license": "MIT", + "dependencies": { + "@modelcontextprotocol/sdk": "^1.12.1", + "dotenv": "^16.4.5", + "express": "^4.21.2" + }, + "bin": { + "mcp-server-wordpress": "dist/index.js" + }, + "devDependencies": { + "@types/express": "^5.0.0", + "@types/node": "^22.13.17", + "shx": "^0.3.4", + "typescript": "^5.6.2" + } + }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.13.1.tgz", + "integrity": "sha512-8q6+9aF0yA39/qWT/uaIj6zTpC+Qu07DnN/lb9mjoquCJsAh6l3HyYqc9O3t2j7GilseOQOQimLg7W3By6jqvg==", + "license": "MIT", + "dependencies": { + "ajv": "^6.12.6", + "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "express": "^5.0.1", + "express-rate-limit": "^7.5.0", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.23.8", + "zod-to-json-schema": "^3.24.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/express": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "license": "MIT", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.0", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/finalhandler": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/serve-static": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@types/body-parser": { + "version": "1.19.5", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", + "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/express": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.1.tgz", + "integrity": "sha512-UZUw8vjpWFXuDnjFTh7/5c2TWDlQqeXHi6hcN7F2XSVT5P+WmUnnbFS3KA6Jnc6IsEqI2qCVu2bK0R0J4A8ZQQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^5.0.0", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.6.tgz", + "integrity": "sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", + "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.13.17", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.17.tgz", + "integrity": "sha512-nAJuQXoyPj04uLgu+obZcSmsfOenUg6DxPKogeUy6yNCFwWaj5sBF8/G/pNo8EtBJjAfSVgfIlugR/BCOleO+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.20.0" + } + }, + "node_modules/@types/qs": { + "version": "6.9.18", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.18.tgz", + "integrity": "sha512-kK7dgTYDyGqS+e2Q4aK9X3D7q234CIZ1Bv0q/7Z5IwRDoADNU81xXJK/YVyLbLTZCoIwUoDoffFeF+p/eIklAA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/send": { + "version": "0.17.4", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", + "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.7", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", + "integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "license": "MIT", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", + "license": "MIT" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/body-parser/node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/body-parser/node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", + "license": "MIT" + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "license": "MIT", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/dotenv": { + "version": "16.5.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.5.0.tgz", + "integrity": "sha512-m/C+AwOAr9/W1UOIZUo232ejMNnJAJtYQjUbHoNTBNTJSvqzzDh7vnrei3o3r3m9blf6ZoDkvcw0VmozNRFJxg==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com/" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventsource": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.6.tgz", + "integrity": "sha512-l19WpE2m9hSuyP06+FbuUUf1G+R0SFLrtQfbRb9PRr+oimOfxQhgGCbVaXg5IvZyyTThJsxh6L/srkMiCeBPDA==", + "license": "MIT", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.1.tgz", + "integrity": "sha512-VARTJ9CYeuQYb0pZEPbzi740OWFgpHe7AYJ2WFZVnUDUQp5Dk2yJUgF36YsZ81cOyxT0QxmXD2EQpapAouzWVA==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/express": { + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "license": "MIT", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.7.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.12", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.0.tgz", + "integrity": "sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg==", + "license": "MIT", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": "^4.11 || 5 || ^5.0.0-beta.1" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/interpret": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/path-to-regexp": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", + "license": "MIT" + }, + "node_modules/pkce-challenge": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", + "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", + "license": "MIT", + "engines": { + "node": ">=16.20.0" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", + "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.6.3", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/rechoir": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", + "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==", + "dev": true, + "dependencies": { + "resolve": "^1.1.6" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + "path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/router/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/router/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/router/node_modules/path-to-regexp": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "license": "MIT", + "engines": { + "node": ">=16" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/send/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "license": "MIT", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shelljs": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", + "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "glob": "^7.0.0", + "interpret": "^1.0.0", + "rechoir": "^0.6.2" + }, + "bin": { + "shjs": "bin/shjs" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/shx": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/shx/-/shx-0.3.4.tgz", + "integrity": "sha512-N6A9MLVqjxZYcVn8hLmtneQWIJtp8IKzMP4eMnx+nqkvXoqinUPCbUFLp2UcWTEIUONhlk0ewxr/jaVGlc+J+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.3", + "shelljs": "^0.8.5" + }, + "bin": { + "shx": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "license": "MIT", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.8.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.2.tgz", + "integrity": "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.20.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", + "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", + "dev": true, + "license": "MIT" + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, + "node_modules/zod": { + "version": "3.24.2", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.2.tgz", + "integrity": "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.24.5", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz", + "integrity": "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.24.1" + } + } + } +} diff --git a/mcp_servers/wordpress/package.json b/mcp_servers/wordpress/package.json new file mode 100755 index 00000000..60ec100f --- /dev/null +++ b/mcp_servers/wordpress/package.json @@ -0,0 +1,32 @@ +{ + "name": "@klavis-ai/mcp-server-wordpress", + "version": "0.6.2", + "description": "MCP server for interacting with WordPress sites", + "license": "MIT", + "author": "Klavis AI (https://klavis.ai)", + "homepage": "https://klavis.ai/", + "type": "module", + "bin": { + "mcp-server-wordpress": "dist/index.js" + }, + "files": [ + "dist" + ], + "scripts": { + "start": "tsc && shx chmod +x dist/*.js && node dist/index.js", + "build": "tsc && shx chmod +x dist/*.js", + "prepare": "npm run build", + "watch": "tsc --watch" + }, + "dependencies": { + "@modelcontextprotocol/sdk": "^1.12.1", + "dotenv": "^16.4.5", + "express": "^4.21.2" + }, + "devDependencies": { + "@types/node": "^22.13.17", + "@types/express": "^5.0.0", + "shx": "^0.3.4", + "typescript": "^5.6.2" + } +} \ No newline at end of file diff --git a/mcp_servers/wordpress/tsconfig.json b/mcp_servers/wordpress/tsconfig.json new file mode 100755 index 00000000..850b8a0d --- /dev/null +++ b/mcp_servers/wordpress/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "Node16", + "moduleResolution": "Node16", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "outDir": "./dist", + "rootDir": "." + }, + "exclude": [ + "node_modules" + ], + "include": [ + "./**/*.ts" + ] +} \ No newline at end of file diff --git a/mcp_servers/youtube/.env.example b/mcp_servers/youtube/.env.example index c382ed1e..bb718eb2 100644 --- a/mcp_servers/youtube/.env.example +++ b/mcp_servers/youtube/.env.example @@ -1,4 +1,5 @@ YOUTUBE_API_KEY=YOUR_API_KEY_HERE -# YOUTUBE_MCP_SERVER_PORT=5000 +TRANSCRIPT_LANGUAGE=en,fr,de +# YOUTUBE_MCP_SERVER_PORT=5000 # WEBSHARE_PROXY_USERNAME=YOUR_USERNAME_HERE # WEBSHARE_PROXY_PASSWORD=YOUR_PASSWORD_HERE \ No newline at end of file diff --git a/mcp_servers/youtube/Dockerfile b/mcp_servers/youtube/Dockerfile index f44cd42f..bd88db2e 100644 --- a/mcp_servers/youtube/Dockerfile +++ b/mcp_servers/youtube/Dockerfile @@ -14,8 +14,7 @@ RUN pip install --no-cache-dir -r requirements.txt # Copy the server code COPY mcp_servers/youtube/server.py . -# Copy the environment file -COPY mcp_servers/youtube/.env . +COPY mcp_servers/youtube/.env.example .env # Expose the port the server runs on EXPOSE 5000 diff --git a/mcp_servers/youtube/README.md b/mcp_servers/youtube/README.md index f7f56f88..c9c1cebf 100644 --- a/mcp_servers/youtube/README.md +++ b/mcp_servers/youtube/README.md @@ -1,117 +1,73 @@ # YouTube MCP Server -[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) +A Model Context Protocol (MCP) server for YouTube integration. Retrieve video transcripts, details, and metadata using YouTube's Data API. -A Model Context Protocol (MCP) server that retrieves transcripts/subtitles for a given YouTube video and provides video details when transcripts are unavailable. +## šŸš€ Quick Start - Run in 30 Seconds -This server utilizes the `FastMCP` framework for handling MCP requests and the YouTube API. +### 🌐 Using Hosted Service (Recommended for Production) -## Features +Get instant access to YouTube with our managed infrastructure - **no setup required**: -* Provides a simple MCP endpoint to get YouTube video transcripts. -* Accepts a YouTube video URL as input. -* Returns the transcript/subtitles when available. -* Falls back to video details when transcript is unavailable. -* Supports proxy configuration for transcript retrieval. -* Built with Python using `FastMCP`. -* Can be run easily using Docker or a standard Python environment. +**šŸ”— [Get Free API Key →](https://www.klavis.ai/home/api-keys)** -## Environment Variables +```bash +pip install klavis +# or +npm install klavis +``` -The following environment variables are used by the server: +```python +from klavis import Klavis -* **Required**: - * `YOUTUBE_API_KEY`: Your YouTube Data API key (required for fetching video details). +klavis = Klavis(api_key="your-free-key") +server = klavis.mcp_server.create_server_instance("YOUTUBE", "user123") +``` -* **Optional**: - * `YOUTUBE_MCP_SERVER_PORT`: Port for the MCP server (defaults to 5000). - * `WEBSHARE_PROXY_USERNAME`: Username for Webshare proxy (optional, for circumventing regional restrictions). - * `WEBSHARE_PROXY_PASSWORD`: Password for Webshare proxy (optional, for circumventing regional restrictions). +### 🐳 Using Docker (For Self-Hosting) -For more information on using Webshare with YouTube Transcript API, refer to: [YouTube Transcript API - Using Webshare](https://github.com/jdepoix/youtube-transcript-api?tab=readme-ov-file#using-webshare) +```bash +# Pull latest image +docker pull ghcr.io/klavis-ai/youtube-mcp-server:latest -## Running Locally -You can run this server locally using either Docker (recommended) or a Python virtual environment. The instructions assume you are in the root directory of the `klavis` project (the parent directory of `mcp_servers`). +# Run YouTube MCP Server +docker run -p 5000:5000 -e API_KEY=$API_KEY \ + ghcr.io/klavis-ai/youtube-mcp-server:latest +``` -### Prerequisites +**API Key Setup:** Get your YouTube Data API key from the [Google Cloud Console](https://console.cloud.google.com/apis/credentials) and enable the YouTube Data API v3. -* **Docker:** If using the Docker method. -* **Python:** Python 3.11+ if using the virtual environment method. -* **`.env` File:** Create a file named `.env` in the root of the `klavis` project directory with the required environment variables. - ```bash - # Required - YOUTUBE_API_KEY=your_youtube_api_key_here - - # Optional - YOUTUBE_MCP_SERVER_PORT=5000 - WEBSHARE_PROXY_USERNAME=your_proxy_username - WEBSHARE_PROXY_PASSWORD=your_proxy_password - ``` +## šŸ› ļø Available Tools -### Using Docker (Recommended) +- **Video Transcripts**: Retrieve full video transcripts with timestamps +- **Video Details**: Get video metadata including title, description, statistics +- **Video Search**: Search for videos by keywords and filters +- **Channel Information**: Get channel details and video listings +- **Playlist Management**: Access playlist contents and metadata -1. **Build the Docker Image:** - Open your terminal in the root directory of the `klavis` project and run: - ```bash - docker build -t youtube-mcp-server -f mcp_servers/youtube/Dockerfile . - ``` - * `-t youtube-mcp-server`: Assigns a tag (name) to the image. - * `-f mcp_servers/youtube/Dockerfile`: Specifies the path to the Dockerfile. - * `.`: Specifies the build context (the current directory, `klavis`). This is important because the Dockerfile copies files from this context. +## šŸ“š Documentation & Support -2. **Run the Docker Container:** - ```bash - docker run -p 5000:5000 --env-file .env youtube-mcp-server - ``` - * `-p 5000:5000`: Maps port 5000 on your host machine to port 5000 inside the container (where the server listens). - * `--env-file .env`: Provides the environment variables from your `.env` file to the container. - * `youtube-mcp-server`: The name of the image to run. +| Resource | Link | +|----------|------| +| **šŸ“– Documentation** | [www.klavis.ai/docs](https://www.klavis.ai/docs) | +| **šŸ’¬ Discord** | [Join Community](https://discord.gg/p7TuTEcssn) | +| **šŸ› Issues** | [GitHub Issues](https://github.com/klavis-ai/klavis/issues) | -The server should now be running and accessible at `http://localhost:5000`. +## šŸ¤ Contributing -### Using Python Virtual Environment +We welcome contributions! Please see our [Contributing Guide](../../CONTRIBUTING.md) for details. -1. **Navigate to Project Root:** - Ensure your terminal is in the root directory of the `klavis` project. +## šŸ“œ License -2. **Create Virtual Environment:** - ```bash - python -m venv venv - ``` +Apache 2.0 license - see [LICENSE](../../LICENSE) for details. -3. **Activate Virtual Environment:** - * **Linux/macOS:** `source venv/bin/activate` - * **Windows:** `venv\Scripts\activate` +--- -4. **Install Dependencies:** - The server relies on packages listed in `requirements.txt`: - ```bash - pip install -r mcp_servers/youtube/requirements.txt - ``` - -5. **Create `.env` File:** - Ensure you have created the `.env` file in the `klavis` root directory as mentioned in the prerequisites. - -6. **Run the Server:** - ```bash - python mcp_servers/youtube/server.py - ``` - -The server should now be running and accessible at `http://localhost:5000`. - -## Usage - -Once the server is running (either via Docker or Python environment), it listens for MCP requests on port 5000. - -You can interact with it using an MCP client or tool. The available tool is: - -* `get_youtube_video_transcript`: - * **Description:** Retrieve the transcript/subtitles for a given YouTube video. When transcripts are unavailable, it will automatically fall back to fetching video details. - * **Input Parameter:** `url` (string) - The URL of the YouTube video (e.g., https://www.youtube.com/watch?v=dQw4w9WgXcQ). - * **Returns:** (Dict) - Contains the transcript data when available, or video details with an error message when transcript is unavailable. - * **Supported URL Formats:** - * Standard: `youtube.com/watch?v=VIDEO_ID` - * Short: `youtu.be/VIDEO_ID` - * Embedded: `youtube.com/embed/VIDEO_ID` - * Shorts: `youtube.com/shorts/VIDEO_ID` \ No newline at end of file +
+

šŸš€ Supercharge AI Applications

+

+ Get Free API Key • + Documentation • + Discord +

+
diff --git a/mcp_servers/youtube/requirements.txt b/mcp_servers/youtube/requirements.txt index e2200f07..8f29ce59 100644 --- a/mcp_servers/youtube/requirements.txt +++ b/mcp_servers/youtube/requirements.txt @@ -1,4 +1,4 @@ -mcp>=1.6.0 +mcp==1.11.0 pydantic fastapi uvicorn[standard] diff --git a/mcp_servers/youtube/server.py b/mcp_servers/youtube/server.py index f995fd75..2a6b89f0 100644 --- a/mcp_servers/youtube/server.py +++ b/mcp_servers/youtube/server.py @@ -1,20 +1,33 @@ -import os +import contextlib import logging +import os import re -from typing import Any, Dict, Annotated +from collections.abc import AsyncIterator +from typing import Any, Dict from urllib.parse import urlparse, parse_qs + +import click +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send +from pydantic import Field from dotenv import load_dotenv import aiohttp -from mcp.server.fastmcp import FastMCP -from pydantic import Field +import asyncio from youtube_transcript_api import YouTubeTranscriptApi from youtube_transcript_api.proxies import WebshareProxyConfig -load_dotenv() +# Configure logging +logger = logging.getLogger(__name__) -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger("youtube-mcp-server") +load_dotenv() +# YouTube API constants and configuration YOUTUBE_API_KEY = os.getenv("YOUTUBE_API_KEY") if not YOUTUBE_API_KEY: raise ValueError("YOUTUBE_API_KEY environment variable is required") @@ -25,12 +38,7 @@ YOUTUBE_API_BASE = "https://www.googleapis.com/youtube/v3" YOUTUBE_MCP_SERVER_PORT = int(os.getenv("YOUTUBE_MCP_SERVER_PORT", "5000")) - -mcp = FastMCP( - "Youtube", - instructions="Retrieve the transcript or video details for a given YouTube video.", - port=YOUTUBE_MCP_SERVER_PORT, -) +TRANSCRIPT_LANGUAGES = [lang.strip() for lang in os.getenv("TRANSCRIPT_LANGUAGE", "en").split(',')] # Initialize YouTube Transcript API with proxy if credentials are available if WEBSHARE_PROXY_USERNAME and WEBSHARE_PROXY_PASSWORD: @@ -131,14 +139,7 @@ async def _make_youtube_request(endpoint: str, params: Dict[str, Any], headers: logger.error(f"An unexpected error occurred during YouTube API request: {e}") raise RuntimeError(f"Unexpected error during API call to {url}") from e -async def get_video_details( - video_id: Annotated[ - str, - Field( - description="The ID of the YouTube video to get details for." - ), - ] -) -> Dict[str, Any]: +async def get_video_details(video_id: str) -> Dict[str, Any]: """Get detailed information about a specific YouTube video.""" logger.info(f"Executing tool: get_video_details with video_id: {video_id}") try: @@ -177,60 +178,197 @@ async def get_video_details( logger.exception(f"Error executing tool get_video_details: {e}") raise e -@mcp.tool() -async def get_youtube_video_transcript( - url: Annotated[ - str, - Field( - description="The URL of the YouTube video to retrieve the transcript/subtitles for. (e.g. https://www.youtube.com/watch?v=dQw4w9WgXcQ)" - ), - ], -) -> Dict[str, Any]: - """ - Retrieve the transcript or video details for a given YouTube video. - The 'start' time in the transcript is formatted as MM:SS or HH:MM:SS. - """ - try: - video_id = _extract_video_id(url) - logger.info(f"Executing tool: get_video_transcript with video_id: {video_id}") + +@click.command() +@click.option("--port", default=YOUTUBE_MCP_SERVER_PORT, help="Port to listen on for HTTP") +@click.option( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", +) +@click.option( + "--json-response", + is_flag=True, + default=False, + help="Enable JSON responses for StreamableHTTP instead of SSE streams", +) +def main( + port: int, + log_level: str, + json_response: bool, +) -> int: + # Configure logging + logging.basicConfig( + level=getattr(logging, log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Create the MCP server instance + app = Server("youtube-mcp-server") + + @app.list_tools() + async def list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="get_youtube_video_transcript", + description="Retrieve the transcript or video details for a given YouTube video. The 'start' time in the transcript is formatted as MM:SS or HH:MM:SS.", + inputSchema={ + "type": "object", + "required": ["url"], + "properties": { + "url": { + "type": "string", + "description": "The URL of the YouTube video to retrieve the transcript/subtitles for. (e.g. https://www.youtube.com/watch?v=dQw4w9WgXcQ)", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_TRANSCRIPT", "readOnlyHint": True}), + ) + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + ctx = app.request_context + + if name == "get_youtube_video_transcript": + url = arguments.get("url") + if not url: + return [ + types.TextContent( + type="text", + text="Error: URL parameter is required", + ) + ] + + try: + result = await get_youtube_video_transcript(url) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + return [ + types.TextContent( + type="text", + text=f"Unknown tool: {name}", + ) + ] + + async def get_youtube_video_transcript(url: str) -> Dict[str, Any]: + """ + Retrieve the transcript or video details for a given YouTube video. + The 'start' time in the transcript is formatted as MM:SS or HH:MM:SS. + """ try: - # Use the initialized API with or without proxy - raw_transcript = youtube_transcript_api.fetch(video_id).to_raw_data() + video_id = _extract_video_id(url) + logger.info(f"Executing tool: get_video_transcript with video_id: {video_id}") - # Format the start time for each segment - formatted_transcript = [ - {**segment, 'start': _format_time(segment['start'])} - for segment in raw_transcript - ] + try: + # Use the initialized API with or without proxy + raw_transcript = youtube_transcript_api.fetch(video_id, languages=TRANSCRIPT_LANGUAGES).to_raw_data() + + # Format the start time for each segment + formatted_transcript = [ + {**segment, 'start': _format_time(segment['start'])} + for segment in raw_transcript + ] + return { + "video_id": video_id, + "transcript": formatted_transcript + } + except Exception as transcript_error: + logger.warning(f"Error fetching transcript: {transcript_error}. Falling back to video details.") + # Fall back to get_video_details + video_details = await get_video_details(video_id) + return { + "video_id": video_id, + "video_details": video_details, + } + except ValueError as e: + logger.exception(f"Invalid YouTube URL: {e}") return { - "video_id": video_id, - "transcript": formatted_transcript + "error": f"Invalid YouTube URL: {str(e)}" } - except Exception as transcript_error: - logger.warning(f"Error fetching transcript: {transcript_error}. Falling back to video details.") - # Fall back to get_video_details - video_details = await get_video_details(video_id) + except Exception as e: + error_message = str(e) + logger.exception(f"Error processing video URL {url}: {error_message}") return { - "video_id": video_id, - "video_details": video_details, + "error": f"Failed to process request: {error_message}" } - except ValueError as e: - logger.exception(f"Invalid YouTube URL: {e}") - return { - "error": f"Invalid YouTube URL: {str(e)}" - } - except Exception as e: - error_message = str(e) - logger.exception(f"Error processing video URL {url}: {error_message}") - return { - "error": f"Failed to process request: {error_message}" - } -def main(): - mcp.run(transport="sse") + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection") + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode - can be changed to use an event store + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request") + await session_manager.handle_request(scope, receive, send) + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager.""" + async with session_manager.run(): + logger.info("Application started with dual transports!") + try: + yield + finally: + logger.info("Application shutting down...") + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Server starting on port {port} with dual transports:") + logger.info(f" - SSE endpoint: http://localhost:{port}/sse") + logger.info(f" - StreamableHTTP endpoint: http://localhost:{port}/mcp") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + return 0 if __name__ == "__main__": - main() + main() \ No newline at end of file diff --git a/open-strata/.gitignore b/open-strata/.gitignore new file mode 100644 index 00000000..dcc9a592 --- /dev/null +++ b/open-strata/.gitignore @@ -0,0 +1,13 @@ +# Python-generated files +__pycache__/ +*.py[oc] +build/ +dist/ +wheels/ +*.egg-info + +# Virtual environments +.venv +.claude +.cursor +.tokens diff --git a/open-strata/.python-version b/open-strata/.python-version new file mode 100644 index 00000000..24ee5b1b --- /dev/null +++ b/open-strata/.python-version @@ -0,0 +1 @@ +3.13 diff --git a/open-strata/LICENSE b/open-strata/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/open-strata/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/open-strata/README.md b/open-strata/README.md new file mode 100644 index 00000000..be722f5c --- /dev/null +++ b/open-strata/README.md @@ -0,0 +1,246 @@ +
+ image +
+ +[![Documentation](https://img.shields.io/badge/Documentation-šŸ“–-green)](https://www.klavis.ai/docs) +[![Website](https://img.shields.io/badge/Website-🌐-purple)](https://www.klavis.ai) +[![Discord](https://img.shields.io/badge/Discord-Join-7289DA?logo=discord&logoColor=white)](https://discord.gg/p7TuTEcssn) + +# Overview + +**Strata** is a unified MCP server that intelligently manages any tool across one or multiple applications, presenting them to AI agents progressively without overwhelming the model context. + +> [!TIP] +> **šŸš€ Looking for production-ready solution?** We offer a **fully managed Strata service** with enterprise-grade performance, access to more features and advanced AI model optimizations. [Get started instantly →](https://www.klavis.ai/) + +**This repository contains our open-source implementation** - a streamlined version you can easily deploy as your MCP aggregation manager in your local infrastructure. + +### šŸš€ The Problem We Solve + +Imagine you have: +- šŸ“¦ Dozens of MCP servers, each with dozens of tools +- šŸ¤– AI models seeing 100s-1000s of tools at once +- šŸ“‰ Degraded performance due to massive context + +### ✨ The Strata Solution + +Instead of flooding your AI model with hundreds of tools, Strata acts as an intelligent router: + +šŸ” **Smart Discovery** → Only exposes a few essential tools to the model +šŸŽÆ **Progressive Access** → Finds and surfaces the right tool when needed +⚔ **Optimized Context** → Maintains peak model performance +šŸ”— **Seamless Integration** → Works with your existing MCP servers, and easy configuration + +# Quick Start + +## Installation + +```bash +pipx install strata-mcp +``` + +Or with pip: +```bash +pip install strata-mcp +``` + +For development: +```bash +pip install -e . +``` + +## Configure MCP Servers + +You can configure your MCP servers by strata CLI tool. Or manually configure it in a JSON file, just like your other MCP JSON config files. + +### Add MCP servers + +- Stdio Server +```bash +strata add --type stdio npx @playwright/mcp@latest +``` + +- SSE Server +```bash +strata add --type sse http://localhost:8080/mcp/ --env API_KEY=your_key +``` + +- HTTP Server +```bash +strata add --type http https://api.githubcopilot.com/mcp/ --header "Authorization=Bearer token" +``` +- HTTP Server with OAuth +```bash +strata add --type http https://mcp.notion.com/mcp --auth_type oauth +``` + + +### List Servers +```bash +strata list +``` + +### Enable/Disable Servers +```bash +strata enable +strata disable +``` + +### Remove Servers +```bash +strata remove server-name +``` + +### Manual Configuration + +Configuration is stored in `~/.config/strata/servers.json` by default. You can specify a custom config path: + +```bash +strata --config-path /path/to/config.json add --type stdio ... +``` + +#### Config Format + +```json +{ + "mcp": { + "servers": { + "github": { + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-github"], + "env": { + "GITHUB_TOKEN": "your_token" + }, + "enabled": true + }, + "api-server": { + "type": "http", + "url": "https://api.example.com/mcp", + "headers": { + "Authorization": "Bearer token" + }, + "enabled": true + } + } + } +} +``` + +#### Environment Variables + +- `MCP_CONFIG_PATH` - Custom config file path +- `MCP_ROUTER_PORT` - Default port for HTTP/SSE server (default: 8080) + +## Running Strata MCP servers + +Strata itself is a MCP server. You can run and use it like a normal MCP, in stdio mode or http/sse mode. + +### Stdio Mode (Default) +Run without arguments to start in stdio mode for direct MCP communication: +```bash +python -m strata +# or +strata +``` + +### HTTP/SSE Server Mode +Run with port to start as HTTP/SSE server: +```bash +strata run --port 8080 +``` + +## Tool Integration + +Strata can automatically configure itself in various AI assistants and IDEs that support MCP. + +#### Add Strata to Claude Code +```bash +# Add to user configuration (default) +strata tool add claude + +# Add to project-specific configuration +strata tool add claude --scope project +``` + +#### Add Strata to Gemini +```bash +strata tool add gemini +``` + +#### Add Strata to VSCode +```bash +strata tool add vscode +``` + +#### Add Strata to Cursor +```bash +# Add to user configuration (~/.cursor/mcp.json) +strata tool add cursor --scope user + +# Add to project configuration (.cursor/mcp.json) +strata tool add cursor --scope project +``` + +**Supported scopes:** +- `user`: Global configuration (default) +- `project`: Project-specific configuration +- `local`: Same as project (for Cursor) + +Note: VSCode doesn't support scope parameter and will use its default behavior. + +## Available Tools + +When running as a router, the following tools are exposed: + +- `discover_server_actions` - Discover available actions from configured servers +- `get_action_details` - Get detailed information about a specific action +- `execute_action` - Execute an action on a target server +- `search_documentation` - Search server documentation +- `handle_auth_failure` - Handle authentication issues + +# Development + +### Running Tests +```bash +pytest +``` + +### Project Structure +- `src/strata/` - Main source code + - `cli.py` - Command-line interface + - `server.py` - Server implementation (stdio/HTTP/SSE) + - `tools.py` - Tool implementations + - `mcp_client_manager.py` - MCP client management + - `config.py` - Configuration management + +## Examples + +### Running GitHub MCP Server through Router +```bash +# Add GitHub server (official HTTP server) +strata add --type http github https://api.githubcopilot.com/mcp/ + +# Run router in stdio mode +strata + +# Or run as HTTP server +strata run --port 8080 +``` + +### Running Multiple Servers +```bash +# Add multiple servers +strata add --type stdio playwright npx @playwright/mcp@latest +strata add --type http github https://api.githubcopilot.com/mcp/ + +# List all servers +strata list + +# Run router with all enabled servers +strata run --port 8080 +``` + +--- + +šŸ‘‰ **[Get Instant Access at Klavis AI (YC X25)](https://klavis.ai/)** šŸ‘ˆ + diff --git a/open-strata/pyproject.toml b/open-strata/pyproject.toml new file mode 100644 index 00000000..0eed7b66 --- /dev/null +++ b/open-strata/pyproject.toml @@ -0,0 +1,62 @@ +[project] +name = "strata-mcp" +version = "1.0.2" +description = "" +readme = "README.md" +license = { text = "Apache-2.0" } +authors = [ + { name = "Klavis AI", email = "connect@klavis.ai" }, +] +keywords = ["mcp", "model-context-protocol", "router", "ai", "llm", "tools"] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + "Topic :: Communications", + "Topic :: System :: Distributed Computing", +] +requires-python = ">=3.10" +urls = { Homepage = "https://www.klavis.ai/", Repository = "https://github.com/Klavis-AI/klavis.git", Issues = "https://github.com/Klavis-AI/klavis/issues" } +dependencies = [ + "bm25s>=0.2.14", + "mcp>=1.0.0", + "platformdirs>=4.4.0", + "pystemmer>=3.0.0", + "starlette>=0.37.0", + "uvicorn>=0.30.0", + "watchgod>=0.7", +] + +[project.optional-dependencies] +dev = [ + "pytest>=7.0.0", + "pytest-asyncio>=0.21.0", +] + +[project.scripts] +strata = "strata.main:main" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["src/strata"] + +[tool.pytest.ini_options] +markers = [ + "integration: marks tests as integration tests with real MCP servers (deselect with '-m \"not integration\"')", +] + +[dependency-groups] +dev = [ + "hatchling>=1.27.0", +] diff --git a/open-strata/src/strata/__init__.py b/open-strata/src/strata/__init__.py new file mode 100644 index 00000000..f3358174 --- /dev/null +++ b/open-strata/src/strata/__init__.py @@ -0,0 +1,6 @@ +"""Strata MCP Router package.""" + +from .main import main + +__version__ = "0.1.0" +__all__ = ["main"] diff --git a/open-strata/src/strata/__main__.py b/open-strata/src/strata/__main__.py new file mode 100644 index 00000000..a1751212 --- /dev/null +++ b/open-strata/src/strata/__main__.py @@ -0,0 +1,6 @@ +"""Entry point for running as a module.""" + +from .main import main + +if __name__ == "__main__": + main() diff --git a/open-strata/src/strata/cli.py b/open-strata/src/strata/cli.py new file mode 100644 index 00000000..4d31163c --- /dev/null +++ b/open-strata/src/strata/cli.py @@ -0,0 +1,364 @@ +"""Command-line interface for Strata MCP Router using argparse.""" + +import argparse +import os +import asyncio +import sys +from pathlib import Path +from urllib.parse import urlparse + +from strata.mcp_client_manager import MCPClientManager + +from .config import MCPServerConfig, MCPServerList +from .logging_config import setup_logging +from .server import run_server, run_stdio_server +from .utils.tool_integration import add_strata_to_tool + + +def add_command(args): + """Add a new MCP server configuration.""" + server_list = MCPServerList(args.config_path) + + # Normalize type (command is alias for stdio) + server_type = args.type + if server_type == "command": + server_type = "stdio" + + # Parse environment variables + env_dict = {} + if args.env: + for env_var in args.env: + if "=" not in env_var: + print( + f"Error: Invalid environment variable format: {env_var}", + file=sys.stderr, + ) + print( + "Environment variables must be in KEY=VALUE format", file=sys.stderr + ) + return 1 + key, value = env_var.split("=", 1) + env_dict[key.strip()] = value + + # Parse headers for URL type + headers_dict = {} + if args.header: + for header_var in args.header: + if ":" not in header_var: + print(f"Error: Invalid header format: {header_var}", file=sys.stderr) + print("Headers must be in KEY:VALUE format", file=sys.stderr) + return 1 + key, value = header_var.split(":", 1) + headers_dict[key.strip()] = value.strip() + + if server_type in ["sse", "http"]: + # Parse HTTP URL to extract base URL and any query parameters + parsed = urlparse(args.url_or_command) + + # Reconstruct URL without query parameters for the URL field + base_url = f"{parsed.scheme}://{parsed.netloc}{parsed.path}" + if parsed.path == "" or parsed.path == "/": + base_url = f"{parsed.scheme}://{parsed.netloc}" + + auth = args.auth_type[0] if args.auth_type else "" + config = MCPServerConfig( + name=args.name, + type=server_type, + url=base_url, + headers=headers_dict, + env=env_dict, + enabled=args.enabled, + auth=auth + ) + else: # stdio/command + # For stdio, url_or_command is the command, args.args contains the arguments + config = MCPServerConfig( + name=args.name, + type="stdio", + command=args.url_or_command, + args=args.args if args.args else [], + env=env_dict, + enabled=args.enabled, + ) + + # Add server to configuration + if server_list.add_server(config): + server_list.save() + print(f"āœ“ Successfully added server '{args.name}' with {server_type} type") + else: + print(f"Server '{args.name}' already exists with identical configuration") + + return 0 + + +def remove_command(args): + """Remove an MCP server configuration.""" + server_list = MCPServerList(args.config_path) + + if args.name in server_list.servers: + del server_list.servers[args.name] + server_list.save() + print(f"āœ“ Successfully removed server '{args.name}'") + return 0 + else: + print(f"Error: Server '{args.name}' not found", file=sys.stderr) + return 1 + + +def list_command(args): + """List all configured MCP servers.""" + server_list = MCPServerList(args.config_path) + + if not server_list.servers: + print("No servers configured") + return 0 + + print("Configured MCP servers:") + for name, config in server_list.servers.items(): + status = "enabled" if config.enabled else "disabled" + type_str = config.type or "stdio" + + if type_str in ["sse", "http"]: + location = config.url + else: + location = f"{config.command} {' '.join(config.args or [])}" + + print(f" • {name} ({type_str}, {status}): {location}") + + return 0 + + +def enable_command(args): + """Enable an MCP server.""" + server_list = MCPServerList(args.config_path) + + if args.name not in server_list.servers: + print(f"Error: Server '{args.name}' not found", file=sys.stderr) + return 1 + + server_list.servers[args.name].enabled = True + server_list.save() + print(f"āœ“ Enabled server '{args.name}'") + return 0 + + +def disable_command(args): + """Disable an MCP server.""" + server_list = MCPServerList(args.config_path) + + if args.name not in server_list.servers: + print(f"Error: Server '{args.name}' not found", file=sys.stderr) + return 1 + + server_list.servers[args.name].enabled = False + server_list.save() + print(f"āœ“ Disabled server '{args.name}'") + return 0 + +async def authenticate(server_name): + # Only initialize the specific server we want to authenticate + async with MCPClientManager(server_names=[server_name]) as client_manager: + await client_manager.authenticate_server(server_name) + +def authenticate_command(args): + """Authenticate with an OAuth2 provider for a given MCP server URL.""" + server_list = MCPServerList(args.config_path) + + if args.name in server_list.servers: + asyncio.run(authenticate(args.name)) + return 0 + else: + print(f"Error: Server '{args.name}' not found", file=sys.stderr) + return 1 + +def tool_add_command(args): + """Add Strata MCP server to Claude, Gemini, VSCode, or Cursor configurations.""" + return add_strata_to_tool(args.target, args.scope or "user") + + +def run_command(args): + """Run the Strata MCP router.""" + # Initialize server with config path if provided + if args.config_path: + import os + + os.environ["MCP_CONFIG_PATH"] = str(args.config_path) + + if args.port is not None: + # Server mode with HTTP/SSE + print(f"Starting Strata MCP Router server on port {args.port}...") + return run_server( + args.port, json_response=False + ) + else: + # Stdio mode + print("Starting Strata MCP Router in stdio mode...", file=sys.stderr) + return run_stdio_server() + + +def create_parser(): + """Create the argument parser.""" + parser = argparse.ArgumentParser( + prog="strata", description="Strata MCP Router - Manage and run MCP servers" + ) + + # Global options + # Check environment variables for default config path + default_config_path = os.environ.get("STRATA_CONFIG_PATH") + parser.add_argument( + "--config-path", + type=Path, + default=Path(default_config_path) if default_config_path else None, + help="Path to configuration file (default: $STRATA_CONFIG_PATH or $MCP_CONFIG_PATH or ~/.config/strata/servers.json)", + ) + + subparsers = parser.add_subparsers(dest="command", help="Available commands") + + # Add command - use REMAINDER to capture all remaining args including those starting with - + add_parser = subparsers.add_parser("add", help="Add a new MCP server configuration") + add_parser.add_argument( + "--type", + choices=["sse", "http", "stdio", "command"], + required=True, + help="Type of the MCP server (sse/http for URL-based, stdio/command for process)", + ) + add_parser.add_argument( + "--env", action="https://github.com/append", help="Environment variables in KEY=VALUE format" + ) + add_parser.add_argument( + "--header", + action="https://github.com/append", + help="HTTP headers in KEY:VALUE format (for URL type only)", + ) + add_parser.add_argument( + "--auth_type", + action="https://github.com/append", + help="Authentication type (oauth etc.)", + ) + add_parser.add_argument( + "--enabled/--disabled", + dest="enabled", + default=True, + action=argparse.BooleanOptionalAction, + help="Whether the server is enabled by default", + ) + add_parser.add_argument("name", help="Name of the server") + add_parser.add_argument( + "url_or_command", help="URL for HTTP/SSE or command for stdio" + ) + add_parser.add_argument( + "args", + nargs="*", + help="Arguments for stdio command (use -- before args starting with -)", + ) + add_parser.set_defaults(func=add_command) + + # Remove command + remove_parser = subparsers.add_parser( + "remove", help="Remove an MCP server configuration" + ) + remove_parser.add_argument("name", help="Name of the server to remove") + remove_parser.set_defaults(func=remove_command) + + # List command + list_parser = subparsers.add_parser("list", help="List all configured MCP servers") + list_parser.set_defaults(func=list_command) + + # Enable command + enable_parser = subparsers.add_parser("enable", help="Enable an MCP server") + enable_parser.add_argument("name", help="Name of the server to enable") + enable_parser.set_defaults(func=enable_command) + + # Disable command + disable_parser = subparsers.add_parser("disable", help="Disable an MCP server") + disable_parser.add_argument("name", help="Name of the server to disable") + disable_parser.set_defaults(func=disable_command) + + # Authenticate command + auth_parser = subparsers.add_parser("auth", help="Authenticate with an MCP server") + auth_parser.add_argument("name", help="Name of the server to authenticate with") + auth_parser.set_defaults(func=authenticate_command) + + # Run command + run_parser = subparsers.add_parser("run", help="Run the Strata MCP router") + run_parser.add_argument( + "--port", + type=int, + help="Port to listen on for HTTP/SSE server mode. If not provided, runs in stdio mode.", + ) + run_parser.add_argument( + "--log-level", + default="INFO", + help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", + ) + run_parser.add_argument( + "--no-banner", + action="https://github.com/store_true", + help="Skip displaying the banner on startup", + ) + run_parser.set_defaults(func=run_command) + + # Set run as default command when no subcommand is provided + parser.set_defaults( + command="run", func=run_command, port=None, log_level="INFO", no_banner=False + ) + + # Tool command + tool_parser = subparsers.add_parser("tool", help="Tool integration commands") + tool_subparsers = tool_parser.add_subparsers( + dest="tool_command", help="Tool commands" + ) + + # Tool add command + tool_add_parser = tool_subparsers.add_parser( + "add", help="Add Strata to Claude, Gemini, VSCode, or Cursor MCP configuration" + ) + tool_add_parser.add_argument( + "target", + choices=["claude", "gemini", "vscode", "cursor"], + help="Target CLI to add Strata to (claude, gemini, vscode, or cursor)", + ) + tool_add_parser.add_argument( + "--scope", + choices=["user", "project", "local"], + default="user", + help="Configuration scope (user, project, or local). Default: user. Note: VSCode doesn't support scope.", + ) + tool_add_parser.set_defaults(func=tool_add_command) + + return parser + + +def main(): + """Main entry point for the MCP Router.""" + parser = create_parser() + args = parser.parse_args() + + # Initialize logging with appropriate settings + log_level = getattr(args, "log_level", "INFO") + no_banner = getattr(args, "no_banner", False) + # banner is only relevant when running the server + if args.command != "run": + no_banner = True + setup_logging(log_level, no_banner) + + # If no subcommand provided, show help + if not hasattr(args, "func"): + parser.print_help() + return 1 + + # Handle tool subcommand specially + if args.command == "tool" and not hasattr(args, "func"): + # No tool subcommand provided, show help message + print("Error: No tool subcommand provided", file=sys.stderr) + print("\nAvailable tool commands:") + print(" add Add Strata to Claude, Gemini, or VSCode MCP configuration") + print("\nUse 'strata tool --help' for more information on a command.") + return 1 + + # Execute the command + return args.func(args) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/open-strata/src/strata/config.py b/open-strata/src/strata/config.py new file mode 100644 index 00000000..a61fb2a4 --- /dev/null +++ b/open-strata/src/strata/config.py @@ -0,0 +1,308 @@ +"""Configuration for MCP servers similar to VSCode's MCP server list.""" + +import json +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Callable, Dict, List, Optional + +from platformdirs import user_config_dir +from watchgod import awatch + + +@dataclass +class MCPServerConfig: + """Configuration for a single MCP server.""" + + name: str + # Transport type + type: str = "stdio" # "stdio", "sse", or "http" + # For stdio/command type + command: Optional[str] = None + args: List[str] = field(default_factory=list) + # For url type + url: Optional[str] = None + headers: Dict[str, str] = field(default_factory=dict) + # Common fields + env: Dict[str, str] = field(default_factory=dict) + enabled: bool = True + # Authentication info could be added here later + auth: str = "" # "none", "oauth2", etc. + + def __post_init__(self): + """Validate configuration after initialization.""" + if self.type in ["sse", "http"]: + if not self.url: + raise ValueError(f"{self.type.upper()} type requires 'url' field") + else: # stdio/command + if not self.command: + raise ValueError("Command type requires 'command' field") + + def to_dict(self) -> Dict[str, Any]: + """Convert to dictionary representation.""" + result = { + "name": self.name, + "type": self.type, + "enabled": self.enabled, + } + + if self.type in ["sse", "http"]: + result["url"] = self.url + if self.headers: + result["headers"] = self.headers + else: # stdio/command + result["command"] = self.command + result["args"] = self.args + + if self.env: + result["env"] = self.env + if self.auth: + result["auth"] = self.auth + + return result + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "MCPServerConfig": + """Create from dictionary representation.""" + type_val = data.get("type", "stdio") + + if type_val in ["sse", "http"]: + return cls( + name=data["name"], + type=type_val, + url=data.get("url"), + headers=data.get("headers", {}), + env=data.get("env", {}), + enabled=data.get("enabled", True), + auth=data.get("auth", ""), + ) + else: # stdio/command + return cls( + name=data["name"], + type=type_val, + command=data.get("command"), + args=data.get("args", []), + env=data.get("env", {}), + enabled=data.get("enabled", True), + ) + + +class MCPServerList: + """Manage a list of MCP server configurations.""" + + def __init__(self, config_path: Optional[Path] = None, use_mcp_format: bool = True): + """Initialize the MCP server list. + + Args: + config_path: Path to the configuration file. If None, uses default. + use_mcp_format: If True, save in MCP format. If False, use legacy format. + """ + if config_path is None: + # Use platformdirs for cross-platform config directory + config_dir = Path(user_config_dir("strata")) + config_dir.mkdir(parents=True, exist_ok=True) + self.config_path = config_dir / "servers.json" + else: + self.config_path = Path(config_path) + + self.servers: Dict[str, MCPServerConfig] = {} + self.use_mcp_format = use_mcp_format + self.load() + + def load(self) -> None: + """Load server configurations from file.""" + if self.config_path.exists(): + try: + with open(self.config_path, "r", encoding="utf-8") as f: + data = json.load(f) + + # Check if it's MCP format (has "mcp" key with "servers" inside) + if "mcp" in data and "servers" in data["mcp"]: + # Parse MCP format + for name, config in data["mcp"]["servers"].items(): + # MCP format doesn't have "name" field, add it + config_dict = { + "name": name, + "type": config.get("type", "stdio"), + "env": config.get("env", {}), + "enabled": config.get("enabled", True), + } + + # Add type-specific fields + if config.get("type") in ["sse", "http"]: + config_dict["url"] = config.get("url") + config_dict["headers"] = config.get("headers", {}) + config_dict["auth"] = config.get("auth", "") + else: # stdio/command + config_dict["command"] = config.get("command", "") + config_dict["args"] = config.get("args", []) + + self.servers[name] = MCPServerConfig.from_dict(config_dict) + # Otherwise check for legacy format + elif "servers" in data: + # Parse legacy format + for name, config in data["servers"].items(): + self.servers[name] = MCPServerConfig.from_dict(config) + except Exception as e: + print(f"Error loading config from {self.config_path}: {e}") + + def save(self) -> None: + """Save server configurations to file.""" + if self.use_mcp_format: + # Save in MCP format + servers_dict = {} + for name, server in self.servers.items(): + server_config = {} + + # Add type field if not stdio (default) + if server.type and server.type != "stdio": + server_config["type"] = server.type + + # Add type-specific fields + if server.type in ["sse", "http"]: + server_config["url"] = server.url + if server.headers: + server_config["headers"] = server.headers + if server.auth: + server_config["auth"] = server.auth + else: # stdio/command + server_config["command"] = server.command + server_config["args"] = server.args + + if server.env: + server_config["env"] = server.env + # Always save enabled field to be explicit + server_config["enabled"] = server.enabled + servers_dict[name] = server_config + + data = {"mcp": {"servers": servers_dict}} + else: + # Save in legacy format + data = { + "servers": { + name: server.to_dict() for name, server in self.servers.items() + } + } + + self.config_path.parent.mkdir(parents=True, exist_ok=True) + with open(self.config_path, "w", encoding="utf-8") as f: + json.dump(data, f, indent=2) + + def add_server(self, server: MCPServerConfig) -> bool: + """Add or update a server configuration. + + Args: + server: Server configuration to add or update. + If a server with the same name exists, it will be overridden. + + Returns: + True if server was added/updated, False if configuration is identical + """ + if server.name in self.servers and self.servers[server.name] == server: + # Configuration is identical, no need to update + return False + + # Add or update server + self.servers[server.name] = server + self.save() + return True + + def remove_server(self, name: str) -> bool: + """Remove a server configuration. + + Args: + name: Name of the server to remove + + Returns: + True if server was removed, False if not found + """ + if name in self.servers: + del self.servers[name] + self.save() + return True + return False + + def get_server(self, name: str) -> Optional[MCPServerConfig]: + """Get a server configuration by name. + + Args: + name: Name of the server + + Returns: + Server configuration or None if not found + """ + return self.servers.get(name) + + def list_servers(self, enabled_only: bool = False) -> List[MCPServerConfig]: + """List all server configurations. + + Args: + enabled_only: If True, only return enabled servers + + Returns: + List of server configurations + """ + servers = list(self.servers.values()) + if enabled_only: + servers = [s for s in servers if s.enabled] + return servers + + def enable_server(self, name: str) -> bool: + """Enable a server. + + Args: + name: Name of the server to enable + + Returns: + True if server was enabled, False if not found + """ + if name in self.servers: + self.servers[name].enabled = True + self.save() + return True + return False + + def disable_server(self, name: str) -> bool: + """Disable a server. + + Args: + name: Name of the server to disable + + Returns: + True if server was disabled, False if not found + """ + if name in self.servers: + self.servers[name].enabled = False + self.save() + return True + return False + + async def watch_config( + self, + on_changed: Callable[[Dict[str, MCPServerConfig]], None], + ): + """Watch configuration file for changes and trigger callback. + + Args: + on_changed: Callback when configuration changes, receives new server dict + """ + async for changes in awatch(str(self.config_path.parent)): + # Check if our config file was modified + config_changed = False + for _, path in changes: + if Path(path) == self.config_path: + config_changed = True + break + + if not config_changed: + continue + + # Reload configuration + self.servers.clear() + self.load() + + # Trigger callback with new server list + on_changed(dict(self.servers)) + + +# Global instance for easy access +mcp_server_list = MCPServerList() diff --git a/open-strata/src/strata/logging_config.py b/open-strata/src/strata/logging_config.py new file mode 100644 index 00000000..bdd8873c --- /dev/null +++ b/open-strata/src/strata/logging_config.py @@ -0,0 +1,109 @@ +"""Logging configuration for Strata MCP Router.""" + +import logging +from datetime import datetime +from pathlib import Path + +from platformdirs import user_cache_dir + + +class BannerFormatter(logging.Formatter): + """Custom formatter for banner messages that shows only the message.""" + + def format(self, record): + return record.getMessage() + + +def log_banner() -> None: + """Log the Klavis AI colorful banner using a temporary handler.""" + # Create a temporary logger with custom formatter + banner_logger = logging.getLogger("banner") + banner_logger.setLevel(logging.INFO) + + # Create a handler with no formatting for clean output + console_handler = logging.StreamHandler() + console_handler.setLevel(logging.INFO) + console_handler.setFormatter(BannerFormatter()) + + banner_logger.addHandler(console_handler) + banner_logger.propagate = False # Don't propagate to root logger + + try: + banner_text = """ + \033[1;31mā–ˆā–ˆā•— ā–ˆā–ˆā•—\033[1;33mā–ˆā–ˆā•— \033[1;32mā–ˆā–ˆā–ˆā–ˆā–ˆā•— \033[1;36mā–ˆā–ˆā•— ā–ˆā–ˆā•—\033[1;34mā–ˆā–ˆā•—\033[1;35mā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•— \033[1;91mā–ˆā–ˆā–ˆā–ˆā–ˆā•— \033[1;93mā–ˆā–ˆā•—\033[0m + \033[1;31mā–ˆā–ˆā•‘ ā–ˆā–ˆā•”ā•\033[1;33mā–ˆā–ˆā•‘ \033[1;32mā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•—\033[1;36mā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘\033[1;34mā–ˆā–ˆā•‘\033[1;35mā–ˆā–ˆā•”ā•ā•ā•ā•ā• \033[1;91mā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•—\033[1;93mā–ˆā–ˆā•‘\033[0m + \033[1;31mā–ˆā–ˆā–ˆā–ˆā–ˆā•”ā• \033[1;33mā–ˆā–ˆā•‘ \033[1;32mā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•‘\033[1;36mā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘\033[1;34mā–ˆā–ˆā•‘\033[1;35mā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•— \033[1;91mā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•‘\033[1;93mā–ˆā–ˆā•‘\033[0m + \033[1;31mā–ˆā–ˆā•”ā•ā–ˆā–ˆā•— \033[1;33mā–ˆā–ˆā•‘ \033[1;32mā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•‘\033[1;36mā•šā–ˆā–ˆā•— ā–ˆā–ˆā•”ā•\033[1;34mā–ˆā–ˆā•‘\033[1;35mā•šā•ā•ā•ā•ā–ˆā–ˆā•‘ \033[1;91mā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•‘\033[1;93mā–ˆā–ˆā•‘\033[0m + \033[1;31mā–ˆā–ˆā•‘ ā–ˆā–ˆā•—\033[1;33mā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•—\033[1;32mā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘ \033[1;36mā•šā–ˆā–ˆā–ˆā–ˆā•”ā• \033[1;34mā–ˆā–ˆā•‘\033[1;35mā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•‘ \033[1;91mā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘\033[1;93mā–ˆā–ˆā•‘\033[0m + \033[1;31mā•šā•ā• ā•šā•ā•\033[1;33mā•šā•ā•ā•ā•ā•ā•ā•\033[1;32mā•šā•ā• ā•šā•ā• \033[1;36mā•šā•ā•ā•ā• \033[1;34mā•šā•ā•\033[1;35mā•šā•ā•ā•ā•ā•ā•ā• \033[1;91mā•šā•ā• ā•šā•ā•\033[1;93mā•šā•ā•\033[0m + + \033[1;32mEmpowering AI with Seamless Integration\033[0m + + \033[1;36m═════════════════════════════════════════════════════════════════════════════\033[0m + \033[1;32m STRATA MCP \033[0mĀ·\033[1;33m One MCP server that use tools reliably at any scale \033[0m + \033[1;36m═════════════════════════════════════════════════════════════════════════════\033[0m + \033[1;33m→ Starting MCP Server...\033[0m +""" + banner_logger.info(banner_text) + finally: + # Clean up the handler + banner_logger.removeHandler(console_handler) + + +def setup_logging(log_level: str = "INFO", no_banner: bool = False) -> None: + """Configure logging to output to both console and file. + + Args: + log_level: The logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL) + no_banner: Skip displaying the banner on startup + """ + # Create cache directory for logs + cache_dir = Path(user_cache_dir("strata")) + cache_dir.mkdir(parents=True, exist_ok=True) + + # Generate log file name with timestamp + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + log_file = cache_dir / f"{timestamp}.log" + + # Configure root logger + root_logger = logging.getLogger() + root_logger.setLevel(getattr(logging, log_level.upper())) + + # Clear any existing handlers + root_logger.handlers.clear() + + # Create formatter + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + # Console handler + console_handler = logging.StreamHandler() + console_handler.setLevel(getattr(logging, log_level.upper())) + console_handler.setFormatter(formatter) + root_logger.addHandler(console_handler) + + # Log the setup to console only (before adding file handler) + logger = logging.getLogger(__name__) + if not no_banner: + log_banner() + # Log initialization message without logging prefix using clean formatter + logger.info(f"Logging initialized - Console: {log_level}, File: {log_file}") + + logger.handlers.clear() + + # File handler (added after the initialization message) + file_handler = logging.FileHandler(log_file, encoding="utf-8", delay=True) + file_handler.setLevel(logging.DEBUG) # Always log DEBUG and above to file + file_handler.setFormatter(formatter) + root_logger.addHandler(file_handler) + + +def get_log_dir() -> Path: + """Get the directory where log files are stored. + + Returns: + Path to the log directory + """ + return Path(user_cache_dir("strata")) diff --git a/open-strata/src/strata/main.py b/open-strata/src/strata/main.py new file mode 100644 index 00000000..ddac0842 --- /dev/null +++ b/open-strata/src/strata/main.py @@ -0,0 +1,6 @@ +"""Main entry point for Strata MCP Router.""" + +from .cli import main + +if __name__ == "__main__": + main() diff --git a/open-strata/src/strata/mcp_client_manager.py b/open-strata/src/strata/mcp_client_manager.py new file mode 100644 index 00000000..b4cf2702 --- /dev/null +++ b/open-strata/src/strata/mcp_client_manager.py @@ -0,0 +1,268 @@ +"""MCP Client Manager for managing multiple MCP server connections.""" + +import asyncio +import logging +from pathlib import Path +from typing import Dict, List, Optional + +from strata.config import MCPServerConfig, MCPServerList +from strata.mcp_proxy.client import MCPClient +from strata.mcp_proxy.transport.http import HTTPTransport +from strata.mcp_proxy.transport.stdio import StdioTransport + +logger = logging.getLogger(__name__) + + +class MCPClientManager: + """Manages multiple MCP client connections based on configuration.""" + + def __init__(self, config_path: Optional[Path] = None, server_names: Optional[List[str]] = None): + """Initialize the MCP client manager. + + Args: + config_path: Optional path to configuration file. + If None, uses default from MCPServerList. + server_names: Optional list of specific server names to initialize. + If None, all enabled servers will be initialized. + """ + self.server_list = MCPServerList(config_path) + self.server_names = server_names # Specific servers to manage + self.active_clients: Dict[str, MCPClient] = {} + self.active_transports: Dict[str, HTTPTransport | StdioTransport] = {} + # Cache of current server configs for comparison during sync + self.cached_configs: List[MCPServerConfig] = [] + # Mutex to prevent concurrent sync operations + self._sync_lock = asyncio.Lock() + + async def initialize_from_config(self) -> Dict[str, bool]: + """Initialize MCP clients from configuration. + + Only initializes servers that are enabled in the configuration. + If server_names was specified in __init__, only those servers will be initialized. + + Returns: + Dict mapping server names to success status (True if connected) + """ + results = {} + enabled_servers = self.server_list.list_servers(enabled_only=True) + + # Filter servers if specific names were provided + if self.server_names: + enabled_servers = [s for s in enabled_servers if s.name in self.server_names] + + for server in enabled_servers: + try: + await self._connect_server(server) + results[server.name] = True + logger.info(f"Successfully connected to MCP server: {server.name}") + except Exception as e: + results[server.name] = False + logger.error(f"Failed to connect to MCP server {server.name}: {e}") + + # Cache all server configs (both enabled and disabled) for future comparisons + self.cached_configs = self.server_list.list_servers() + + return results + + async def authenticate_server(self, server_name: str) -> None: + """Authenticate a single MCP server. + + Args: + server_name: Name of the server to authenticate + """ + if server_name in self.active_clients: + client = self.active_clients[server_name] + try: + await client.initialize() + await client.disconnect() + logger.info(f"Initialized MCP server: {server_name}") + except Exception as e: + logger.error(f"Error initializing {server_name}: {e}") + + async def _connect_server(self, server: MCPServerConfig) -> None: + """Connect to a single MCP server. + + Args: + server: Server configuration + + Raises: + Exception: If connection fails + """ + # Create transport based on type + if server.type in ["sse", "http"]: + if not server.url: + raise ValueError(f"Server {server.name} has no URL configured") + + transport = HTTPTransport( + server_name=server.name, + url=server.url, + mode=server.type, # "http" or "sse" # type: ignore + headers=server.headers, + auth=server.auth + ) + else: # stdio/command + if not server.command: + raise ValueError(f"Server {server.name} has no command configured") + transport = StdioTransport( + command=server.command, args=server.args, env=server.env + ) + + # Create client + client = MCPClient(transport) + + # Connect + await client.connect() + + # Store active client and transport + self.active_clients[server.name] = client + self.active_transports[server.name] = transport + + async def _disconnect_server(self, server_name: str) -> None: + """Disconnect from a single MCP server. + + Args: + server_name: Name of the server to disconnect + """ + if server_name in self.active_clients: + client = self.active_clients[server_name] + try: + await client.disconnect() + logger.info(f"Disconnected from MCP server: {server_name}") + except Exception as e: + logger.error(f"Error disconnecting from {server_name}: {e}") + finally: + # Remove from active clients + del self.active_clients[server_name] + if server_name in self.active_transports: + del self.active_transports[server_name] + + async def sync_with_config(self, new_servers: Dict[str, MCPServerConfig]) -> None: + """Sync the manager state with new configuration. + + This method handles all changes: add, remove, enable, disable, and config updates. + Uses a mutex lock to prevent concurrent sync operations. + + Args: + new_servers: New server configurations from config file + """ + async with self._sync_lock: + # Create lookup for current cached configs by name + cached_by_name = {config.name: config for config in self.cached_configs} + + # Find servers to remove (in active clients but not in new config) + servers_to_remove = set(self.active_clients.keys()) - set( + new_servers.keys() + ) + for server_name in servers_to_remove: + await self._disconnect_server(server_name) + logger.info(f"Removed MCP server: {server_name}") + + # Process each server in new config + for server_name, new_config in new_servers.items(): + try: + is_active = server_name in self.active_clients + cached_config = cached_by_name.get(server_name) + config_changed = cached_config != new_config + + if new_config.enabled: + if not is_active: + # Server is enabled but not connected, connect it + await self._connect_server(new_config) + logger.info(f"Connected to MCP server: {server_name}") + elif config_changed: + # Server is active but config changed, reconnect + await self._disconnect_server(server_name) + await self._connect_server(new_config) + logger.info( + f"Reconnected MCP server with new config: {server_name}" + ) + # If server is active and config unchanged, do nothing + else: + if is_active: + # Server is disabled but still connected, disconnect it + await self._disconnect_server(server_name) + logger.info(f"Disabled MCP server: {server_name}") + except Exception as e: + logger.error(f"Failed to sync server {server_name}: {e}") + + # Update cached configs with new config + self.cached_configs = list(new_servers.values()) + + def get_client(self, server_name: str) -> MCPClient: + """Get an active MCP client by server name. + + Args: + server_name: Name of the server + + Returns: + MCPClient instance if active, None otherwise + """ + return self.active_clients[server_name] + + def list_active_servers(self) -> list[str]: + """List names of all active (connected) servers. + + Returns: + List of active server names + """ + return list(self.active_clients.keys()) + + def is_connected(self, server_name: str) -> bool: + """Check if a server is currently connected. + + Args: + server_name: Name of the server + + Returns: + True if connected, False otherwise + """ + client = self.active_clients.get(server_name) + return client is not None and client.is_connected() + + async def disconnect_all(self) -> None: + """Disconnect from all active MCP servers.""" + server_names = list(self.active_clients.keys()) + for server_name in server_names: + await self._disconnect_server(server_name) + logger.info("Disconnected from all MCP servers") + + async def reconnect_server(self, server_name: str) -> bool: + """Reconnect to a server (disconnect if connected, then connect). + + Args: + server_name: Name of the server to reconnect + + Returns: + True if successfully reconnected, False otherwise + """ + server = self.server_list.get_server(server_name) + if server is None: + logger.error(f"Server not found: {server_name}") + return False + + if not server.enabled: + logger.error(f"Cannot reconnect disabled server: {server_name}") + return False + + try: + # Disconnect if active + if server_name in self.active_clients: + await self._disconnect_server(server_name) + + # Reconnect + await self._connect_server(server) + logger.info(f"Reconnected to MCP server: {server_name}") + return True + + except Exception as e: + logger.error(f"Failed to reconnect to server {server_name}: {e}") + return False + + async def __aenter__(self): + """Enter async context manager.""" + await self.initialize_from_config() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + """Exit async context manager.""" + await self.disconnect_all() diff --git a/open-strata/src/strata/mcp_proxy/__init__.py b/open-strata/src/strata/mcp_proxy/__init__.py new file mode 100644 index 00000000..cd25c815 --- /dev/null +++ b/open-strata/src/strata/mcp_proxy/__init__.py @@ -0,0 +1,7 @@ +"""MCP Proxy module for connecting to and interacting with MCP servers.""" + +from .client import MCPClient +from .transport import HTTPTransport, StdioTransport, Transport +from .auth_provider import create_oauth_provider + +__all__ = ["MCPClient", "StdioTransport", "HTTPTransport", "Transport", "create_oauth_provider"] diff --git a/open-strata/src/strata/mcp_proxy/auth_provider.py b/open-strata/src/strata/mcp_proxy/auth_provider.py new file mode 100644 index 00000000..110252b4 --- /dev/null +++ b/open-strata/src/strata/mcp_proxy/auth_provider.py @@ -0,0 +1,200 @@ +import threading +import time +import webbrowser +import os +import json +import logging + +from http.server import BaseHTTPRequestHandler, HTTPServer +from urllib.parse import parse_qs, urlparse + +from mcp.client.auth import OAuthClientProvider, TokenStorage +from mcp.shared.auth import OAuthClientInformationFull, OAuthClientMetadata, OAuthToken + + +# Set up logging +logger = logging.getLogger(__name__) + +TOKEN_DIRECTORY = '.tokens' + +class LocalTokenStorage(TokenStorage): + """Simple in-memory token storage implementation.""" + + def __init__(self, server_name: str = "default"): + self.server_name: str = server_name + self._tokens: OAuthToken | None = None + self._client_info: OAuthClientInformationFull | None = None + self.token_lock = threading.Lock() + self.info_lock = threading.Lock() + self.TOKEN_PATH = os.path.join(TOKEN_DIRECTORY, self.server_name,"tokens.json") + + async def get_tokens(self) -> OAuthToken | None: + if os.path.exists(self.TOKEN_PATH) and self._tokens is None: + with self.token_lock: + with open(self.TOKEN_PATH, "r") as f: + try: + data = json.load(f) + self._tokens = OAuthToken.model_validate(data) + except Exception as e: + logger.info("Error loading tokens:", e) + return self._tokens + + async def set_tokens(self, tokens: OAuthToken) -> None: + self._tokens = tokens + os.makedirs(os.path.dirname(self.TOKEN_PATH), exist_ok=True) + with self.token_lock: + with open(self.TOKEN_PATH, "w") as f: + dump = tokens.model_dump(exclude_none=True, mode='json') + json.dump(dump, f) + + async def get_client_info(self) -> OAuthClientInformationFull | None: + return self._client_info + + async def set_client_info(self, client_info: OAuthClientInformationFull) -> None: + self._client_info = client_info + + +class CallbackHandler(BaseHTTPRequestHandler): + """Simple HTTP handler to capture OAuth callback.""" + + def __init__(self, request, client_address, server, callback_data): + """Initialize with callback data storage.""" + self.callback_data = callback_data + super().__init__(request, client_address, server) + + def do_GET(self): + """Handle GET request from OAuth redirect.""" + parsed = urlparse(self.path) + query_params = parse_qs(parsed.query) + + if "code" in query_params: + self.callback_data["authorization_code"] = query_params["code"][0] + self.callback_data["state"] = query_params.get("state", [None])[0] + self.send_response(200) + self.send_header("Content-type", "text/html") + self.end_headers() + self.wfile.write(b""" + + +

Authorization Successful!

+

You can close this window and return to the terminal.

+ + + + """) + elif "error" in query_params: + self.callback_data["error"] = query_params["error"][0] + self.send_response(400) + self.send_header("Content-type", "text/html") + self.end_headers() + self.wfile.write( + f""" + + +

Authorization Failed

+

Error: {query_params["error"][0]}

+

You can close this window and return to the terminal.

+ + + """.encode() + ) + else: + self.send_response(404) + self.end_headers() + + def log_message(self, format, *args): + """Suppress default logging.""" + pass + + +class CallbackServer: + """Simple server to handle OAuth callbacks.""" + + def __init__(self, port=3000): + self.port = port + self.server = None + self.thread = None + self.callback_data = {"authorization_code": None, "state": None, "error": None} + + def _create_handler_with_data(self): + """Create a handler class with access to callback data.""" + callback_data = self.callback_data + + class DataCallbackHandler(CallbackHandler): + def __init__(self, request, client_address, server): + super().__init__(request, client_address, server, callback_data) + + return DataCallbackHandler + + def start(self): + """Start the callback server in a background thread.""" + handler_class = self._create_handler_with_data() + self.server = HTTPServer(("localhost", self.port), handler_class) + self.thread = threading.Thread(target=self.server.serve_forever, daemon=True) + self.thread.start() + logger.info(f"šŸ–„ļø Started callback server on http://localhost:{self.port}") + + def stop(self): + """Stop the callback server.""" + if self.server: + self.server.shutdown() + self.server.server_close() + if self.thread: + self.thread.join(timeout=1) + + def wait_for_callback(self, timeout=300): + """Wait for OAuth callback with timeout.""" + start_time = time.time() + while time.time() - start_time < timeout: + if self.callback_data["authorization_code"]: + return self.callback_data["authorization_code"] + elif self.callback_data["error"]: + raise Exception(f"OAuth error: {self.callback_data['error']}") + time.sleep(0.1) + raise Exception("Timeout waiting for OAuth callback") + + def get_state(self): + """Get the received state parameter.""" + return self.callback_data["state"] + +def create_oauth_provider(server_name: str, url: str) -> OAuthClientProvider: + """Create OAuth authentication provider.""" + client_metadata_dict = { + "client_name": "KLAVIS Strata MCP Router", + "redirect_uris": ["http://localhost:3030/callback"], + "grant_types": ["authorization_code", "refresh_token"], + "response_types": ["code"], + "token_endpoint_auth_method": "client_secret_post", + } + + async def callback_handler() -> tuple[str, str | None]: + """Wait for OAuth callback and return auth code and state.""" + callback_server = CallbackServer(port=3030) + callback_server.start() + logger.info("ā³ Waiting for authorization callback...") + try: + auth_code = callback_server.wait_for_callback(timeout=300) + return auth_code, callback_server.get_state() + finally: + callback_server.stop() + + async def _default_redirect_handler(authorization_url: str) -> None: + """Default redirect handler that opens the URL in a browser.""" + logger.info(f"Opening browser for authorization: {authorization_url}") + webbrowser.open(authorization_url) + + auth_url = "" + if url.endswith("/mcp"): + auth_url = url[:-4] + elif url.endswith("/sse"): + auth_url = url[:-4] + else: + auth_url = url + + return OAuthClientProvider( + server_url=auth_url, + client_metadata=OAuthClientMetadata.model_validate(client_metadata_dict), + storage=LocalTokenStorage(server_name), + redirect_handler=_default_redirect_handler, + callback_handler=callback_handler, + ) \ No newline at end of file diff --git a/open-strata/src/strata/mcp_proxy/client.py b/open-strata/src/strata/mcp_proxy/client.py new file mode 100644 index 00000000..677c9c8a --- /dev/null +++ b/open-strata/src/strata/mcp_proxy/client.py @@ -0,0 +1,148 @@ +"""MCP Client for connecting to and interacting with MCP servers.""" + +import logging +from typing import Any, Dict, List, Optional + +from mcp import types + +from .transport import Transport + +logger = logging.getLogger(__name__) + + +class MCPClient: + """Client for connecting to MCP servers using various transports. + + Usage: + # With stdio transport + transport = StdioTransport("docker", ["run", "-i", "my-server"]) + client = MCPClient(transport) + await client.connect() + + # With HTTP/SSE transport + transport = HTTPTransport("http://localhost:8080/", mode="sse") + client = MCPClient(transport) + await client.connect() + """ + + def __init__(self, transport: Transport): + """Initialize the MCP client with a transport. + + Args: + transport: Transport instance (StdioTransport or HTTPTransport) + """ + self.transport = transport + self._tools_cache: Optional[List[Dict[str, Any]]] = None + + async def initialize(self) -> None: + """Initialize the MCP client by connecting the transport.""" + await self.transport.initialize() + + async def connect(self) -> None: + """Connect to the MCP server.""" + await self.transport.connect() + logger.info( + f"Connected to MCP server using {self.transport.__class__.__name__}" + ) + + async def disconnect(self) -> None: + """Disconnect from the MCP server.""" + await self.transport.disconnect() + self._tools_cache = None + logger.info("Disconnected from MCP server") + + def is_connected(self) -> bool: + """Check if connected to an MCP server.""" + return self.transport.is_connected() + + async def list_tools(self, use_cache: bool = True) -> List[Dict[str, Any]]: + """List available tools from the MCP server. + + Args: + use_cache: Whether to use cached tools if available + + Returns: + List of tool definitions with name, description, and inputSchema + """ + if not self.transport.is_connected(): + raise RuntimeError("Not connected to any MCP server") + + if use_cache and self._tools_cache is not None: + return self._tools_cache + + # Get tools from server + session = self.transport.get_session() + response = await session.list_tools() + + # Convert to dict format + tools = [] + for tool in response.tools: + tool_dict = { + "name": tool.name, + "description": tool.description, + "inputSchema": tool.inputSchema, + } + # Add optional fields only if they have values + if hasattr(tool, "title") and tool.title: + tool_dict["title"] = tool.title + if hasattr(tool, "outputSchema") and tool.outputSchema: + tool_dict["outputSchema"] = tool.outputSchema + tools.append(tool_dict) + + self._tools_cache = tools + logger.info(f"Retrieved {len(tools)} tools from MCP server") + + return tools + + async def call_tool( + self, tool_name: str, arguments: Dict[str, Any] + ) -> List[types.ContentBlock]: + """Call a tool on the MCP server. + + Args: + tool_name: Name of the tool to call + arguments: Arguments to pass to the tool + + Returns: + Tool execution result from MCP server + """ + if not self.transport.is_connected(): + raise RuntimeError("Not connected to any MCP server") + + logger.info(f"Calling tool '{tool_name}' with arguments: {arguments}") + + # Call the tool and return result directly + session = self.transport.get_session() + result = await session.call_tool(tool_name, arguments) + if result.isError: + logger.error( + f"Tool '{tool_name}' returned error: {result.structuredContent}" + ) + raise RuntimeError(f"Tool '{tool_name}' error: {result.structuredContent}") + return result.content + + async def get_tool_schema(self, tool_name: str) -> Optional[Dict[str, Any]]: + """Get the schema for a specific tool. + + Args: + tool_name: Name of the tool + + Returns: + Tool schema or None if not found + """ + tools = await self.list_tools() + + for tool in tools: + if tool["name"] == tool_name: + return tool + + return None + + async def __aenter__(self): + """Enter async context manager.""" + await self.connect() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + """Exit async context manager.""" + await self.disconnect() diff --git a/open-strata/src/strata/mcp_proxy/transport/__init__.py b/open-strata/src/strata/mcp_proxy/transport/__init__.py new file mode 100644 index 00000000..d1ebab17 --- /dev/null +++ b/open-strata/src/strata/mcp_proxy/transport/__init__.py @@ -0,0 +1,7 @@ +"""Transport implementations for MCP client.""" + +from .base import Transport +from .http import HTTPTransport +from .stdio import StdioTransport + +__all__ = ["Transport", "HTTPTransport", "StdioTransport"] diff --git a/open-strata/src/strata/mcp_proxy/transport/base.py b/open-strata/src/strata/mcp_proxy/transport/base.py new file mode 100644 index 00000000..4c2b4f1b --- /dev/null +++ b/open-strata/src/strata/mcp_proxy/transport/base.py @@ -0,0 +1,104 @@ +"""Abstract base class for MCP transport implementations.""" + +import logging +from abc import ABC, abstractmethod +from contextlib import AsyncExitStack +from typing import Optional, Tuple + +from mcp.client.session import ClientSession + +logger = logging.getLogger(__name__) + + +class Transport(ABC): + """Abstract base class for MCP transport implementations.""" + + def __init__(self): + """Initialize the transport.""" + self._session: Optional[ClientSession] = None + self._exit_stack: Optional[AsyncExitStack] = None + self._connected: bool = False + + @abstractmethod + async def _get_streams(self, exit_stack: AsyncExitStack) -> Tuple: + """Get the transport-specific streams. + + Args: + exit_stack: AsyncExitStack to manage the streams + + Returns: + Tuple of (read_stream, write_stream) + """ + async def initialize(self) -> None: + if not self._exit_stack: + self._exit_stack = AsyncExitStack() + + try: + # Get transport-specific streams + streams = await self._get_streams(self._exit_stack) + + # Create client session (common for all transports) + self._session = await self._exit_stack.enter_async_context( + ClientSession(streams[0], streams[1]) + ) + logger.info("Client session created successfully") + # Initialize the session + await self._session.initialize() + + self._connected = True + logger.info(f"Successfully connected via {self.__class__.__name__}") + + except Exception as e: + logger.error(f"Failed to connect via {self.__class__.__name__}: {e}") + if self._exit_stack: + await self._exit_stack.aclose() + self._exit_stack = None + raise + + async def connect(self) -> None: + """Connect to the MCP server using the specific transport.""" + if self._connected: + return + + await self.initialize() + + async def disconnect(self) -> None: + """Disconnect from the MCP server.""" + if not self._connected: + return + + if self._exit_stack: + try: + await self._exit_stack.aclose() + except RuntimeError as e: + # Handle cross-task cleanup errors from anyio's CancelScope + if "cancel scope" in str(e).lower(): + logger.warning( + "Cross-task cleanup detected and handled. " + "This typically happens with pytest fixtures." + ) + else: + raise + + self._session = None + self._exit_stack = None + self._connected = False + + def is_connected(self) -> bool: + """Check if connected to an MCP server.""" + return self._connected + + def get_session(self) -> ClientSession: + """Get the current client session.""" + if not self._connected or not self._session: + raise RuntimeError("Not connected to an MCP server.") + return self._session + + async def __aenter__(self): + """Enter async context manager.""" + await self.connect() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + """Exit async context manager.""" + await self.disconnect() diff --git a/open-strata/src/strata/mcp_proxy/transport/http.py b/open-strata/src/strata/mcp_proxy/transport/http.py new file mode 100644 index 00000000..839bdffc --- /dev/null +++ b/open-strata/src/strata/mcp_proxy/transport/http.py @@ -0,0 +1,80 @@ +"""HTTP and SSE transport implementations for MCP.""" + +import logging +from contextlib import AsyncExitStack +from typing import Dict, Literal, Optional, Tuple +from urllib.parse import urlparse + +from mcp.client.sse import sse_client +from mcp.client.streamable_http import streamablehttp_client + +from strata.mcp_proxy.auth_provider import create_oauth_provider +from .base import Transport + +logger = logging.getLogger(__name__) + +class HTTPTransport(Transport): + """HTTP/SSE transport for MCP communication.""" + + def __init__( + self, + server_name: str, + url: str, + mode: Literal["http", "sse"] = "http", + headers: Optional[Dict[str, str]] = None, + auth: str = "", + ): + """Initialize HTTP transport. + + Args: + url: HTTP/HTTPS URL of the MCP server + mode: Transport mode - "http" for request/response, "sse" for server-sent events + headers: Optional headers to send with requests + """ + super().__init__() + self.server_name = server_name + self.url = url + self.mode = mode + self.headers = headers or {} + self.auth = auth + + # Validate URL + parsed = urlparse(url) + if parsed.scheme not in ("http", "https"): + raise ValueError(f"Invalid URL scheme: {parsed.scheme}") + + async def _get_streams(self, exit_stack: AsyncExitStack) -> Tuple: + """Get HTTP/SSE transport streams. + + Args: + exit_stack: AsyncExitStack to manage the streams + + Returns: + Tuple of (read_stream, write_stream) + """ + if self.mode == "sse": + # Connect via SSE for server-sent events + logger.info(f"Connecting to MCP server via SSE: {self.url}") + if self.auth == "oauth": + return await exit_stack.enter_async_context( + sse_client( + self.url, headers=self.headers, auth=create_oauth_provider(self.server_name, self.url)) + ) + return await exit_stack.enter_async_context( + sse_client(self.url, headers=self.headers) + ) + elif self.mode == "http": + # Connect via standard HTTP (request/response) + logger.info(f"Connecting to MCP server via HTTP: {self.url}") + if self.auth == "oauth": + return await exit_stack.enter_async_context( + streamablehttp_client( + self.url, headers=self.headers, auth=create_oauth_provider(self.server_name, self.url)) + ) + return await exit_stack.enter_async_context( + streamablehttp_client(self.url, headers=self.headers) + ) + else: + raise ValueError( + f"Invalid transport mode: {self.mode}. Use 'http' or 'sse'." + ) diff --git a/open-strata/src/strata/mcp_proxy/transport/stdio.py b/open-strata/src/strata/mcp_proxy/transport/stdio.py new file mode 100644 index 00000000..c086567c --- /dev/null +++ b/open-strata/src/strata/mcp_proxy/transport/stdio.py @@ -0,0 +1,69 @@ +"""Stdio transport implementation for MCP.""" + +import logging +from contextlib import AsyncExitStack +from typing import Dict, List, Optional, Tuple + +from mcp.client.stdio import StdioServerParameters, stdio_client + +from .base import Transport + +logger = logging.getLogger(__name__) + + +class StdioTransport(Transport): + """Stdio transport for MCP communication.""" + + def __init__( + self, + command: str, + args: Optional[List[str]] = None, + env: Optional[Dict[str, str]] = None, + ): + """Initialize stdio transport. + + Args: + command: Command to execute (e.g., "docker") + args: Command arguments (e.g., ["run", "-i", "--rm", "-e", "GITHUB_PERSONAL_ACCESS_TOKEN", "ghcr.io/github/github-mcp-server"]) + env: Environment variables to pass to the command (e.g., {"GITHUB_PERSONAL_ACCESS_TOKEN": "${input:github_token}"}) + + Example server configuration: + "servers": { + "github": { + "command": "docker", + "args": [ + "run", + "-i", + "--rm", + "-e", + "GITHUB_PERSONAL_ACCESS_TOKEN", + "ghcr.io/github/github-mcp-server" + ], + "env": { + "GITHUB_PERSONAL_ACCESS_TOKEN": "${input:github_token}" + } + } + } + """ + super().__init__() + self.command = command + self.args = args or [] + self.env = env or {} + + async def _get_streams(self, exit_stack: AsyncExitStack) -> Tuple: + """Get stdio transport streams. + + Args: + exit_stack: AsyncExitStack to manage the streams + + Returns: + Tuple of (read_stream, write_stream) + """ + # Create stdio server parameters + server_params = StdioServerParameters( + command=self.command, args=self.args, env=self.env + ) + + # Connect via stdio + logger.info(f"Connecting to MCP server via stdio: {self.command} {self.args}") + return await exit_stack.enter_async_context(stdio_client(server_params)) diff --git a/open-strata/src/strata/server.py b/open-strata/src/strata/server.py new file mode 100644 index 00000000..e7282b2c --- /dev/null +++ b/open-strata/src/strata/server.py @@ -0,0 +1,205 @@ +"""Main server module for Strata MCP Router.""" + +import asyncio +import contextlib +import logging +import os +from collections.abc import AsyncIterator + +import mcp.types as types +from mcp.server.lowlevel import Server +from mcp.server.sse import SseServerTransport +from mcp.server.stdio import stdio_server +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.applications import Starlette +from starlette.responses import Response +from starlette.routing import Mount, Route +from starlette.types import Receive, Scope, Send + +from .mcp_client_manager import MCPClientManager +from .tools import execute_tool, get_tool_definitions + +# Configure logging +logger = logging.getLogger(__name__) + +MCP_ROUTER_PORT = int(os.getenv("MCP_ROUTER_PORT", "8080")) + +# Global client manager +client_manager = MCPClientManager() + + +@contextlib.asynccontextmanager +async def config_watching_context(): + """Shared context manager for config watching in both stdio and HTTP modes.""" + # Initialize client manager + try: + await client_manager.initialize_from_config() + logger.info("Client managers initialized successfully") + except Exception as e: + logger.error(f"Failed to initialize client managers: {e}") + # Continue anyway, managers will be created on demand + + # Start config watching in background + def on_config_changed(new_servers): + """Handle config changes by syncing the client manager.""" + logger.info("Config file changed, syncing client manager...") + + async def safe_sync(): + """Safely sync with config, catching any errors.""" + try: + await client_manager.sync_with_config(new_servers) + except Exception as e: + logger.error(f"Error during config sync: {e}") + # Don't let sync errors crash the server + + # Schedule sync on the event loop + asyncio.create_task(safe_sync()) + + # Start watching config file for changes + watch_task = asyncio.create_task( + client_manager.server_list.watch_config(on_config_changed) + ) + logger.info("Config file watching enabled - changes will be auto-synced") + + try: + yield + finally: + logger.info("Shutting down...") + # Stop config watching + watch_task.cancel() + try: + await watch_task + except asyncio.CancelledError: + logger.info("Config watching stopped") + # Clean up client managers + await client_manager.disconnect_all() + + +def setup_server_handlers(server: Server) -> None: + """Set up shared MCP server handlers for both stdio and HTTP modes.""" + + @server.list_tools() + async def list_tools() -> list[types.Tool]: + """List all available Strata tools.""" + try: + # Get available servers from client manager + user_available_servers = list(client_manager.active_clients.keys()) + return get_tool_definitions(user_available_servers) + except Exception as e: + logger.error(f"Error listing strata tools: {str(e)}") + return [] + + @server.call_tool(validate_input=False) + async def call_tool(name: str, arguments: dict) -> list[types.ContentBlock]: + """Call one of the strata tools.""" + return await execute_tool(name, arguments, client_manager) + + +async def run_stdio_server_async() -> None: + """Run the Strata MCP router in stdio mode.""" + + # Create server instance + server = Server("strata-mcp-stdio") + + # Set up shared handlers + setup_server_handlers(server) + + # Use shared config watching context manager + logger.info("Strata MCP Router running in stdio mode") + async with config_watching_context(): + async with stdio_server() as (read_stream, write_stream): + await server.run( + read_stream, write_stream, server.create_initialization_options() + ) + + +def run_stdio_server() -> int: + """Run the stdio server synchronously.""" + try: + asyncio.run(run_stdio_server_async()) + return 0 + except KeyboardInterrupt: + logger.info("Stdio server stopped by user") + return 0 + except Exception as e: + logger.error(f"Error running stdio server: {e}") + return 1 + + +def run_server(port: int, json_response: bool) -> int: + """Run the MCP router server with the given configuration.""" + + # Create the MCP router server instance + app = Server("strata-mcp-server") + + # Set up shared handlers + setup_server_handlers(app) + + # Set up SSE transport + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + logger.info("Handling SSE connection for router") + + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + except Exception as e: + logger.error(f"SSE connection error: {e}") + return Response() + + # Set up StreamableHTTP transport + session_manager = StreamableHTTPSessionManager( + app=app, + event_store=None, # Stateless mode + json_response=json_response, + stateless=True, + ) + + async def handle_streamable_http( + scope: Scope, receive: Receive, send: Send + ) -> None: + logger.info("Handling StreamableHTTP request for router") + try: + await session_manager.handle_request(scope, receive, send) + finally: + logger.info("StreamableHTTP request completed") + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + """Context manager for session manager and client initialization.""" + async with config_watching_context(): + async with session_manager.run(): + logger.info("Strata MCP Router started with dual transports!") + logger.info("Available tools:") + logger.info("- discover_server_actions: Discover available actions") + logger.info("- get_action_details: Get detailed action parameters") + logger.info("- execute_action: Execute server actions") + logger.info("- search_documentation: Search server documentation") + logger.info("- handle_auth_failure: Handle authentication issues") + yield + + # Create an ASGI application with routes for both transports + starlette_app = Starlette( + debug=True, + routes=[ + # SSE routes + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Mount("/messages/", app=sse.handle_post_message), + # StreamableHTTP route + Mount("/mcp", app=handle_streamable_http), + ], + lifespan=lifespan, + ) + + logger.info(f"Strata MCP Router starting on port {port} with dual transports") + + import uvicorn + + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + + return 0 diff --git a/open-strata/src/strata/tools.py b/open-strata/src/strata/tools.py new file mode 100644 index 00000000..903eeaf7 --- /dev/null +++ b/open-strata/src/strata/tools.py @@ -0,0 +1,383 @@ +"""Shared tool implementations for Strata MCP Router.""" + +import json +import logging +from typing import List + +import mcp.types as types + +from .mcp_client_manager import MCPClientManager +from .utils.shared_search import UniversalToolSearcher + +logger = logging.getLogger(__name__) + +# Tool Names +TOOL_DISCOVER_SERVER_ACTIONS = "discover_server_actions" +TOOL_GET_ACTION_DETAILS = "get_action_details" +TOOL_EXECUTE_ACTION = "execute_action" +TOOL_SEARCH_DOCUMENTATION = "search_documentation" +TOOL_HANDLE_AUTH_FAILURE = "handle_auth_failure" + + +def get_tool_definitions(user_available_servers: List[str]) -> List[types.Tool]: + """Get tool definitions for the available servers.""" + return [ + types.Tool( + name=TOOL_DISCOVER_SERVER_ACTIONS, + description="**PREFERRED STARTING POINT**: Discover available actions from servers based on user query.", + inputSchema={ + "type": "object", + "required": ["user_query", "server_names"], + "properties": { + "user_query": { + "type": "string", + "description": "Natural language user query to filter results.", + }, + "server_names": { + "type": "array", + "items": {"type": "string", "enum": user_available_servers}, + "description": "List of server names to discover actions from.", + }, + }, + }, + ), + types.Tool( + name=TOOL_GET_ACTION_DETAILS, + description="Get detailed information about a specific action.", + inputSchema={ + "type": "object", + "required": ["server_name", "action_name"], + "properties": { + "server_name": { + "type": "string", + "enum": user_available_servers, + "description": "The name of the server", + }, + "action_name": { + "type": "string", + "description": "The name of the action/operation", + }, + }, + }, + ), + types.Tool( + name=TOOL_EXECUTE_ACTION, + description="Execute a specific action with the provided parameters.", + inputSchema={ + "type": "object", + "required": ["server_name", "action_name"], + "properties": { + "server_name": { + "type": "string", + "enum": user_available_servers, + "description": "The name of the server", + }, + "action_name": { + "type": "string", + "description": "The name of the action/operation to execute", + }, + "path_params": { + "type": "string", + "description": "JSON string containing path parameters", + }, + "query_params": { + "type": "string", + "description": "JSON string containing query parameters", + }, + "body_schema": { + "type": "string", + "description": "JSON string containing request body", + "default": "{}", + }, + }, + }, + ), + types.Tool( + name=TOOL_SEARCH_DOCUMENTATION, + description="Search for server action documentations by keyword matching.", + inputSchema={ + "type": "object", + "required": ["query", "server_name"], + "properties": { + "query": { + "type": "string", + "description": "Search keywords", + }, + "server_name": { + "type": "string", + "enum": user_available_servers, + "description": "Name of the server to search within.", + }, + "max_results": { + "type": "integer", + "description": "Number of results to return. Default: 10", + "minimum": 1, + "maximum": 50, + "default": 10, + }, + }, + }, + ), + types.Tool( + name=TOOL_HANDLE_AUTH_FAILURE, + description="Handle authentication failures that occur when executing actions.", + inputSchema={ + "type": "object", + "required": ["server_name", "intention"], + "properties": { + "server_name": { + "type": "string", + "enum": user_available_servers, + "description": "The name of the server", + }, + "intention": { + "type": "string", + "enum": ["get_auth_url", "save_auth_data"], + "description": "Action to take for authentication", + }, + "auth_data": { + "type": "object", + "description": "Authentication data when saving", + }, + }, + }, + ), + ] + + +async def execute_tool( + name: str, arguments: dict, client_manager: MCPClientManager +) -> List[types.ContentBlock]: + """Execute a tool with the given arguments.""" + try: + result = None + + if name == TOOL_DISCOVER_SERVER_ACTIONS: + user_query = arguments.get("user_query") + server_names = arguments.get("server_names") + + # If no server names provided, use all available servers + if not server_names: + server_names = list(client_manager.active_clients.keys()) + + # Discover actions from specified servers + discovery_result = {} + for server_name in server_names: + try: + client = client_manager.get_client(server_name) + tools = await client.list_tools() + + # Filter tools based on user query if provided + if user_query and tools: + tools_map = {server_name: tools} + searcher = UniversalToolSearcher(tools_map) + search_results = searcher.search(user_query, max_results=50) + + filtered_action_names = [] + for result_item in search_results: + for tool in tools: + if tool["name"] == result_item["name"]: + filtered_action_names.append(tool["name"]) + break + discovery_result[server_name] = { + "action_count": len(filtered_action_names), + "actions": filtered_action_names, + } + else: + # Return only action count if no query + tool_list = tools or [] + discovery_result[server_name] = { + "action_count": len(tool_list), + "actions": [tool["name"] for tool in tool_list], + } + except KeyError: + discovery_result[server_name] = { + "error": f"Server '{server_name}' not found or not connected" + } + except Exception as e: + logger.error( + f"Error discovering actions from {server_name}: {str(e)}" + ) + discovery_result[server_name] = {"error": str(e)} + + result = {"servers": discovery_result} + + elif name == TOOL_GET_ACTION_DETAILS: + server_name = arguments.get("server_name") + action_name = arguments.get("action_name") + + if not server_name or not action_name: + return [ + types.TextContent( + type="text", + text="Error: Both server_name and action_name are required", + ) + ] + + try: + client = client_manager.get_client(server_name) + tools = await client.list_tools() + + action_found = None + for tool in tools or []: + if tool["name"] == action_name: + action_found = tool + break + + if action_found: + result = { + "server": server_name, + "action": { + "name": action_found["name"], + "description": action_found["description"], + "inputSchema": action_found["inputSchema"], + }, + } + else: + result = { + "error": f"Action '{action_name}' not found in server '{server_name}'" + } + except KeyError: + result = {"error": f"Server '{server_name}' not found or not connected"} + except Exception as e: + logger.error(f"Error getting action details: {str(e)}") + result = {"error": f"Error getting action details: {str(e)}"} + + elif name == TOOL_EXECUTE_ACTION: + server_name = arguments.get("server_name") + action_name = arguments.get("action_name") + path_params = arguments.get("path_params") + query_params = arguments.get("query_params") + body_schema = arguments.get("body_schema", "{}") + + if not server_name or not action_name: + return [ + types.TextContent( + type="text", + text="Error: server_name and action_name are required", + ) + ] + + try: + client = client_manager.get_client(server_name) + action_params = {} + + # Parse parameters if they're JSON strings + for param_name, param_value in [ + ("path_params", path_params), + ("query_params", query_params), + ("body_schema", body_schema), + ]: + if param_value and param_value != "{}": + try: + if isinstance(param_value, str): + action_params.update(json.loads(param_value)) + else: + action_params.update(param_value) + except json.JSONDecodeError: + return [ + types.TextContent( + type="text", + text=f"Error: Invalid JSON in {param_name}", + ) + ] + + # Call the tool on the MCP server + return await client.call_tool(action_name, action_params) + + except KeyError: + result = {"error": f"Server '{server_name}' not found or not connected"} + except Exception as e: + logger.error(f"Error executing action: {str(e)}") + result = {"error": f"Error executing action: {str(e)}"} + + elif name == TOOL_SEARCH_DOCUMENTATION: + query = arguments.get("query") + server_name = arguments.get("server_name") + max_results = arguments.get("max_results", 10) + + if not query or not server_name: + return [ + types.TextContent( + type="text", + text="Error: Both query and server_name are required", + ) + ] + + try: + client = client_manager.get_client(server_name) + tools = await client.list_tools() + + tools_map = {server_name: tools if tools else []} + searcher = UniversalToolSearcher(tools_map) + result = searcher.search(query, max_results=max_results) + except KeyError: + result = [ + {"error": f"Server '{server_name}' not found or not connected"} + ] + except Exception as e: + logger.error(f"Error searching documentation: {str(e)}") + result = [{"error": f"Error searching documentation: {str(e)}"}] + + elif name == TOOL_HANDLE_AUTH_FAILURE: + server_name = arguments.get("server_name") + intention = arguments.get("intention") + auth_data = arguments.get("auth_data") + + if not server_name or not intention: + return [ + types.TextContent( + type="text", + text="Error: Both server_name and intention are required", + ) + ] + + try: + if intention == "get_auth_url": + result = { + "server": server_name, + "message": f"Authentication required for server '{server_name}'", + "instructions": "Please provide authentication credentials", + "required_fields": {"token": "Authentication token or API key"}, + } + elif intention == "save_auth_data": + if not auth_data: + return [ + types.TextContent( + type="text", + text="Error: auth_data is required when intention is 'save_auth_data'", + ) + ] + result = { + "server": server_name, + "status": "success", + "message": f"Authentication data saved for server '{server_name}'", + } + else: + result = {"error": f"Invalid intention: '{intention}'"} + except Exception as e: + logger.error(f"Error handling auth failure: {str(e)}") + result = {"error": f"Error handling auth failure: {str(e)}"} + + else: + return [types.TextContent(type="text", text=f"Unknown tool: {name}")] + + # Convert result to TextContent + return [ + types.TextContent( + type="text", + text=( + json.dumps(result, separators=(",", ":")) + if isinstance(result, (dict, list)) + else str(result) + ), + ) + ] + + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", text=f"Error executing tool '{name}': {str(e)}" + ) + ] diff --git a/open-strata/src/strata/utils/__init__.py b/open-strata/src/strata/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/open-strata/src/strata/utils/bm25_search.py b/open-strata/src/strata/utils/bm25_search.py new file mode 100644 index 00000000..62151a02 --- /dev/null +++ b/open-strata/src/strata/utils/bm25_search.py @@ -0,0 +1,181 @@ +""" +BM25+ based search utility with field-level scoring + +This implementation flattens document fields into separate documents for independent scoring, +then aggregates scores by original document ID with field weights. + +Algorithm: +1. Each field becomes a separate document: "original_id:field_key" -> field_value +2. BM25 scores each field independently +3. Final score = sum(field_score * field_weight) for all fields of same original_id + +Installation: + pip install "bm25s" + pip install PyStemmer +""" + +from collections import defaultdict +from typing import List, Tuple + +import bm25s +import Stemmer + + +class BM25SearchEngine: + """ + Field-aware BM25+ search engine that scores each field independently + """ + + def __init__(self, use_stemmer: bool = True): + """ + Initialize the BM25+ search engine + + Args: + use_stemmer: Whether to use stemming for better search results + """ + self.stemmer = Stemmer.Stemmer("english") if use_stemmer else None + self.retriever = None + # Maps flattened_doc_id -> (original_doc_id, field_key, field_weight) + self.corpus_metadata = None + # Maps original_doc_id -> [(field_key, weight), ...] + self.doc_field_weights = None + + def build_index(self, documents: List[Tuple[List[Tuple[str, str, int]], str]]): + """ + Build BM25 index from documents by flattening fields into separate documents + + Args: + documents: List of (fields, doc_id) tuples + fields: List of (field_key, field_value, weight) tuples + doc_id: Document identifier string + + Example: + documents = [ + ( + [ + ("service", "projects", 30), + ("operation", "create_project", 30), + ("description", "Creates a new project", 20), + ], + "projects:create_project" + ), + ] + + This creates separate BM25 documents: + - "projects:create_project:service" -> "projects" + - "projects:create_project:operation" -> "create_project" + - "projects:create_project:description" -> "Creates a new project" + """ + corpus = [] + self.corpus_metadata = [] + self.doc_field_weights = defaultdict(list) + + for fields, original_doc_id in documents: + for field_key, field_value, weight in fields: + if field_value and weight > 0: + # Preprocess field value for better tokenization + processed_value = self._preprocess_field_value(field_value.strip()) + corpus.append(processed_value) + + # Store metadata: flattened_id -> (original_id, field_key, weight) + self.corpus_metadata.append((original_doc_id, field_key, weight)) + + # Store field weights by original document + self.doc_field_weights[original_doc_id].append((field_key, weight)) + + if not corpus: + raise ValueError("No documents to index") + + # Tokenize corpus (each field value separately) + corpus_tokens = bm25s.tokenize( + corpus, + stopwords=[], # Disable stopwords for better field matching + show_progress=False, + ) + + # Create and index BM25+ retriever + self.retriever = bm25s.BM25(method="bm25+") + self.retriever.index(corpus_tokens, show_progress=False) + + def search(self, query: str, top_k: int = 10) -> List[Tuple[float, str]]: + """ + Search indexed documents with field-level scoring and weighted aggregation + + Args: + query: Search query string + top_k: Number of top results to return + + Returns: + List of (score, doc_id) tuples sorted by score descending + + Algorithm: + 1. Search all flattened field documents + 2. Group results by original document ID + 3. Calculate weighted sum: score = sum(field_score * field_weight) + 4. Return top_k results by final weighted score + """ + if self.retriever is None or self.corpus_metadata is None: + raise ValueError("No documents indexed. Call build_index() first.") + + # Tokenize query (matching build_index settings) + query_tokens = bm25s.tokenize( + query, + stopwords=[], # Disable stopwords to match build_index + show_progress=False, + ) + + # Search all flattened documents to ensure complete field aggregation + # We need all matching fields for accurate document scoring + search_k = len(self.corpus_metadata) + doc_indices, scores = self.retriever.retrieve( + query_tokens, k=search_k, show_progress=False + ) + + # Aggregate scores by original document ID + doc_scores = defaultdict(float) + + for i in range(doc_indices.shape[1]): + idx = doc_indices[0, i] + field_score = scores[0, i] + + if idx < len(self.corpus_metadata): + original_doc_id, _, field_weight = self.corpus_metadata[idx] + + # Add weighted field score to document total + weighted_score = float(field_score) * field_weight + doc_scores[original_doc_id] += weighted_score + + # Sort by aggregated score and return top k + sorted_results = sorted(doc_scores.items(), key=lambda x: x[1], reverse=True) + + # Return top_k results as (score, doc_id) tuples + return [(score, doc_id) for doc_id, score in sorted_results[:top_k]] + + def _preprocess_field_value(self, value: str) -> str: + """ + Preprocess field values to improve tokenization + + Converts underscore_separated and camelCase text to space-separated words + for better BM25 matching. + + Examples: + "create_project" -> "create project" + "getUserProjects" -> "get User Projects" + "/api/v1/projects" -> "/api/v1/projects" + """ + import re + + # Replace underscores with spaces + value = value.replace("_", " ") + + # Replace hyphens with spaces + value = value.replace("-", " ") + + # Split camelCase: insert space before uppercase letters + # But preserve existing spaces and special characters + value = re.sub(r"([a-z])([A-Z])", r"\1 \2", value) + + # Clean up multiple spaces + value = re.sub(r"\s+", " ", value).strip() + + return value diff --git a/open-strata/src/strata/utils/dict_utils.py b/open-strata/src/strata/utils/dict_utils.py new file mode 100644 index 00000000..fb545ad1 --- /dev/null +++ b/open-strata/src/strata/utils/dict_utils.py @@ -0,0 +1,29 @@ +from typing import Any, Dict, Optional + + +def find_in_dict_case_insensitive( + name: str, dictionary: Dict[str, Any] +) -> Optional[str]: + """Helper function to find name in dictionary using case-insensitive matching. + + Args: + name: The name to search for + dictionary: Dictionary to search in + + Returns: + The actual key from the dictionary if found, None otherwise + """ + if not isinstance(name, str) or not dictionary: + return None + + # First try exact match + if name in dictionary: + return name + + # Then try case-insensitive match + name_lower = name.lower() + for key in dictionary.keys(): + if isinstance(key, str) and key.lower() == name_lower: + return key + + return None diff --git a/open-strata/src/strata/utils/field_search.py b/open-strata/src/strata/utils/field_search.py new file mode 100644 index 00000000..1659d1a8 --- /dev/null +++ b/open-strata/src/strata/utils/field_search.py @@ -0,0 +1,233 @@ +""" +Field-based weighted search engine + +A simpler alternative to BM25 that focuses on field-level matching +with explicit weights and avoids document length bias. + +## Algorithm Design + +This search engine implements a three-layer scoring system to prevent score explosion +and ensure relevant results: + +### Layer 1: Match Quality Scoring +For each token-field match, we assign a base score based on match quality: +- Exact match: weight Ɨ 3.0 (e.g., field value "projects" == query "projects") +- Word boundary match: weight Ɨ 2.0 (e.g., "list projects" matches query "projects" as complete word) +- Partial match: weight Ɨ 1.0 (e.g., "project_list" contains query "project") + +### Layer 2: Intra-field Token Decay (Harmonic Series) +When multiple query tokens match within the same field, we apply diminishing returns: +- 1st token: 100% of score +- 2nd token: 50% of score (1/2) +- 3rd token: 33% of score (1/3) +- 4th token: 25% of score (1/4) + +This prevents long descriptions from accumulating excessive scores by matching many tokens. + +### Layer 3: Per-field Logarithmic Dampening +After calculating field scores, we apply logarithmic dampening based on field type: +- Description fields (description, param_desc): log(1 + score) Ɨ 5 (stronger dampening) +- Identifier fields (service, operation, tag, path, etc.): log(1 + score) Ɨ 10 (lighter dampening) + +This prevents any single field from dominating the final score, especially verbose fields. + +### Final Score Calculation +- Sum all dampened field scores +- Add diversity bonus: sqrt(matched_field_types) Ɨ 3 + (rewards matching across multiple field types) + +## Problem Scenarios This Solves + +### Scenario 1: Keyword Repetition +Query: "projects" +Without dampening: +- Endpoint A: service="projects"(90) + tag="projects"(90) + path="/projects"(60) + + description="manage projects"(20) = 260 points +- Endpoint B: service="users"(0) + operation="get_user_projects"(60) = 60 points + +With our algorithm: +- Endpoint A: log(91)Ɨ10 + log(91)Ɨ10 + log(61)Ɨ10 + log(21)Ɨ5 = 45.5+45.5+40.2+15.2 = 146.4 +- Endpoint B: log(61)Ɨ10 = 40.2 + +Still favors A but with reasonable margin, not 4x difference. + +### Scenario 2: Long Description Domination +Query: "create user project pipeline" +Without dampening: +- Endpoint A: description contains all 4 words = 20Ɨ4 = 80 points +- Endpoint B: operation="create_pipeline" = 30Ɨ2 = 60 points + +With our algorithm: +- Endpoint A: (20 + 20/2 + 20/3 + 20/4) = 41.7 → log(42.7)Ɨ5 = 18.6 points +- Endpoint B: 30Ɨ2 = 60 → log(61)Ɨ10 = 40.2 points + +Now B correctly ranks higher as it's more specific. + +### Scenario 3: Exact Service Name Match +Query: "projects" +- Service name exactly "projects": 30Ɨ3=90 → log(91)Ɨ10 = 45.5 points +This ensures exact matches still get high scores despite dampening. + +## Weight Configuration + +Weights should be configured based on field importance: +- High (30): service, operation, tag, path - core identifiers +- Medium (20): summary, description - contextual information +- Low (5): method, param - auxiliary information +- Minimal (1-2): param_desc, body_field - verbose/detailed fields + +The weights are passed during document indexing, allowing different OpenAPI +implementations to customize based on their documentation structure. +""" + +import math +import re +from typing import List, Tuple + + +class FieldSearchEngine: + """ + Simple field-based search engine with weighted scoring + Compatible with BM25SearchEngine interface + """ + + def __init__(self, **kwargs): + """Initialize the search engine (kwargs for compatibility with BM25SearchEngine)""" + self.documents = [] + self.corpus_metadata = None + + def build_index(self, documents: List[Tuple[List[Tuple[str, str, int]], str]]): + """ + Build index from documents + + Args: + documents: List of (fields, doc_id) tuples + fields: List of (field_key, field_value, weight) tuples + weight is used as field priority + doc_id: Document identifier string + """ + self.documents = [] + self.corpus_metadata = [] + + for fields, doc_id in documents: + # Store document with structured fields and their weights + doc_fields = {} + field_weights = {} + + for field_key, field_value, weight in fields: + if field_value: + # Group values by field type + if field_key not in doc_fields: + doc_fields[field_key] = [] + field_weights[field_key] = weight + doc_fields[field_key].append(field_value.lower()) + + # Use the highest weight if multiple values for same field + if weight > field_weights.get(field_key, 0): + field_weights[field_key] = weight + + self.documents.append( + {"id": doc_id, "fields": doc_fields, "weights": field_weights} + ) + self.corpus_metadata.append(doc_id) + + def search(self, query: str, top_k: int = 10) -> List[Tuple[float, str]]: + """ + Search documents with field-weighted scoring and logarithmic dampening + + Args: + query: Search query string + top_k: Number of top results to return + + Returns: + List of (score, doc_id) tuples sorted by score descending + """ + if not self.documents: + return [] + + # Tokenize query into words + query_tokens = query.lower().split() + + results = [] + + for doc in self.documents: + # Track scores by field type to apply per-field dampening + field_scores = {} + matched_field_types = set() + + # Check each field type + for field_type, field_values in doc["fields"].items(): + # Get weight from document's field weights + field_weight = doc["weights"].get(field_type, 1.0) + + # Track tokens matched in this field + field_token_scores = [] + matched_tokens = set() + + # For each query token + for token in query_tokens: + # Check if token appears in any value of this field + best_match_score = 0 + + for value in field_values: + if ( + self._match_token(token, value) + and token not in matched_tokens + ): + # Calculate match quality + match_score = 0 + + # Exact match gets highest score + if value == token: + match_score = 3.0 + # Word boundary match (complete word) + elif re.search(r"\b" + re.escape(token) + r"\b", value): + match_score = 2.0 + # Partial match gets base score + else: + match_score = 1.0 + + best_match_score = max(best_match_score, match_score) + + if best_match_score > 0: + matched_tokens.add(token) + field_token_scores.append(field_weight * best_match_score) + matched_field_types.add(field_type) + + # Apply diminishing returns for multiple tokens in same field + if field_token_scores: + # Sort scores in descending order + field_token_scores.sort(reverse=True) + + # Apply decay: 1st token 100%, 2nd 50%, 3rd 33%, etc. + field_total = 0 + for i, token_score in enumerate(field_token_scores): + field_total += token_score / (i + 1) + + # Apply logarithmic dampening per field to prevent single field domination + # This prevents description or other verbose fields from dominating + if field_type in ["description", "param_desc"]: + # Stronger dampening for description fields + field_scores[field_type] = math.log(1 + field_total) * 5 + else: + # Lighter dampening for identifier fields + field_scores[field_type] = math.log(1 + field_total) * 10 + + # Calculate final score + if field_scores: + # Sum all field scores (already dampened per field) + total_score = sum(field_scores.values()) + + # Add diversity bonus for matching multiple field types + diversity_bonus = math.sqrt(len(matched_field_types)) * 3 + + final_score = total_score + diversity_bonus + results.append((final_score, doc["id"])) + + # Sort by score descending and return top k + results.sort(key=lambda x: x[0], reverse=True) + return results[:top_k] + + def _match_token(self, token: str, text: str) -> bool: + """Check if token matches in text""" + return token in text diff --git a/open-strata/src/strata/utils/shared_search.py b/open-strata/src/strata/utils/shared_search.py new file mode 100644 index 00000000..910467c8 --- /dev/null +++ b/open-strata/src/strata/utils/shared_search.py @@ -0,0 +1,202 @@ +""" +Shared search utility for both single_server and strata_server. +Provides type-safe interfaces for searching through MCP tools +Uses a unified generic approach to reduce code duplication. +""" + +from typing import Any, Dict, List, Optional + +from mcp import types + +from strata.utils.bm25_search import BM25SearchEngine + + +class UniversalToolSearcher: + """ + Universal searcher that handles all tool types + using a single unified approach based on function names. + """ + + def __init__(self, mixed_tools_map: Dict[str, List[Any]]): + """ + Initialize universal searcher with mixed tool types. + + Args: + mixed_tools_map: Dictionary mapping categories to tools. + Tools can be either types.Tool objects or dict objects. + """ + self.tools_map = mixed_tools_map + self.search_engine = self._build_index() + + def _get_tool_name(self, tool: Any) -> Optional[str]: + """Extract name from any tool type.""" + if isinstance(tool, types.Tool): + return tool.name if tool.name else None + elif isinstance(tool, dict): + return tool.get("name") + return None + + def _get_tool_field(self, tool: Any, field_name: str, default: Any = None) -> Any: + """Extract field value from any tool type.""" + if isinstance(tool, types.Tool): + return getattr(tool, field_name, default) + elif isinstance(tool, dict): + return tool.get(field_name, default) + return default + + def _build_index(self) -> BM25SearchEngine: + """Build unified search index from all tools.""" + documents = [] + + for category_name, tools in self.tools_map.items(): + for tool in tools: + # Get tool name (function name) + tool_name = self._get_tool_name(tool) + if not tool_name: + continue + + # Build weighted fields + fields = [] + + # Core identifiers - highest weight + fields.append(("category", category_name.lower(), 30)) + fields.append(("operation", tool_name.lower(), 30)) + + # Title if available + title = self._get_tool_field(tool, "title", "") + if title: + fields.append(("title", str(title).lower(), 30)) + + # Description/Summary - highest weight + description = self._get_tool_field(tool, "description", "") + if description: + fields.append(("description", str(description).lower(), 30)) + + summary = self._get_tool_field(tool, "summary", "") + if summary: + fields.append(("summary", str(summary).lower(), 30)) + + tags = self._get_tool_field(tool, "tags", []) + if isinstance(tags, list): + for tag in tags: + if tag: + fields.append(("tag", str(tag).lower(), 30)) + + path = self._get_tool_field(tool, "path", "") + if path: + fields.append(("path", str(path).lower(), 30)) + + method = self._get_tool_field(tool, "method", "") + if method: + fields.append(("method", str(method).lower(), 15)) + + for param_type in ["path_params", "query_params"]: + params = self._get_tool_field(tool, param_type, {}) + for param_name, param_info in params.items(): + fields.append( + (f"{param_type}/{param_name}", param_name.lower(), 15) + ) + if isinstance(param_info, dict): + param_desc = param_info.get("description", "") + if param_desc: + fields.append( + ( + f"{param_type}/{param_name}_desc", + param_desc.lower(), + 15, + ) + ) + + # Body schema fields + body_schema = self._get_tool_field(tool, "body_schema", {}) + for param_name, param_info in body_schema.get("properties", {}).items(): + fields.append((f"body_schema/{param_name}", param_name.lower(), 15)) + if isinstance(param_info, dict): + param_desc = param_info.get("description", "") + if param_desc: + fields.append( + ( + f"body_schema/{param_name}_desc", + param_desc.lower(), + 15, + ) + ) + + response_schema = self._get_tool_field(tool, "response_schema", {}) + for param_name, param_info in response_schema.get( + "properties", {} + ).items(): + fields.append( + (f"response_schema/{param_name}", param_name.lower(), 5) + ) + if isinstance(param_info, dict): + param_desc = param_info.get("description", "") + if param_desc: + fields.append( + ( + f"response_schema/{param_name}_desc", + param_desc.lower(), + 5, + ) + ) + + # Create document ID + doc_id = f"{category_name}::{tool_name}" + if fields: + documents.append((fields, doc_id)) + + # Build search index + search_engine = BM25SearchEngine() + search_engine.build_index(documents) + return search_engine + + def search(self, query: str, max_results: int = 10) -> List[Dict[str, Any]]: + """ + Search through all tools. + + Args: + query: Search query string + max_results: Maximum number of results to return + + Returns: + List of search results with tool information + """ + if self.search_engine is None: + return [] + + # Perform search + search_results = self.search_engine.search(query.lower(), top_k=max_results) + + # Build results + results = [] + + for score, doc_id in search_results: + # Parse doc_id + if "::" not in doc_id: + continue + + category_name, tool_name = doc_id.split("::", 1) + + # Find the tool + if category_name in self.tools_map: + for tool in self.tools_map[category_name]: + if self._get_tool_name(tool) == tool_name: + result = { + "name": tool_name, + "description": self._get_tool_field( + tool, "description", "" + ), + "category_name": category_name, + # "relevance_score": score, + } + + # Add optional fields if they exist + for field in ["title", "summary"]: + value = self._get_tool_field(tool, field) + if value: + result[field] = value + + results.append(result) + break + + return results diff --git a/open-strata/src/strata/utils/tool_integration.py b/open-strata/src/strata/utils/tool_integration.py new file mode 100644 index 00000000..1f4b8f54 --- /dev/null +++ b/open-strata/src/strata/utils/tool_integration.py @@ -0,0 +1,269 @@ +"""Tool integration utilities for adding Strata to various IDEs and editors.""" + +import json +import subprocess +import sys +from pathlib import Path + + +def update_json_recursively(data: dict, keys: list, value) -> dict: + """Recursively update a nested dictionary, creating keys as needed. + + Args: + data: The dictionary to update + keys: List of keys representing the path to update + value: The value to set at the path + + Returns: + The updated dictionary + """ + if not keys: + return data + + if len(keys) == 1: + # Base case: set the value + key = keys[0] + if isinstance(data.get(key), dict) and isinstance(value, dict): + # Merge dictionaries if both are dict type + data[key] = {**data.get(key, {}), **value} + else: + data[key] = value + return data + + # Recursive case: ensure intermediate keys exist + key = keys[0] + if key not in data: + data[key] = {} + elif not isinstance(data[key], dict): + # If the existing value is not a dict, replace it with dict + data[key] = {} + + data[key] = update_json_recursively(data[key], keys[1:], value) + return data + + +def ensure_json_config(config_path: Path) -> dict: + """Ensure JSON configuration file exists and return its content. + + Args: + config_path: Path to the configuration file + + Returns: + The configuration dictionary + """ + # Create directory if it doesn't exist + config_path.parent.mkdir(parents=True, exist_ok=True) + + # Load existing config or create empty one + if config_path.exists(): + try: + with open(config_path, "r", encoding="utf-8") as f: + return json.load(f) + except (json.JSONDecodeError, IOError) as e: + print( + f"Warning: Could not read existing config {config_path}: {e}", + file=sys.stderr, + ) + print("Creating new configuration file", file=sys.stderr) + + return {} + + +def save_json_config(config_path: Path, config: dict) -> None: + """Save JSON configuration to file. + + Args: + config_path: Path to save the configuration + config: Configuration dictionary to save + """ + config_path.parent.mkdir(parents=True, exist_ok=True) + with open(config_path, "w", encoding="utf-8") as f: + json.dump(config, f, indent=2, ensure_ascii=False) + + +def check_cli_available(target: str) -> bool: + """Check if a CLI tool is available. + + Args: + target: Name of the CLI tool to check + + Returns: + True if the CLI tool is available, False otherwise + """ + try: + if target == "vscode": + # Check for VSCode CLI (code command) + result = subprocess.run( + ["code", "--version"], capture_output=True, text=True, timeout=5 + ) + else: + result = subprocess.run( + [target, "--version"], capture_output=True, text=True, timeout=5 + ) + + return result.returncode == 0 + except (subprocess.SubprocessError, FileNotFoundError): + return False + + +def add_strata_to_cursor(scope: str = "user") -> int: + """Add Strata to Cursor MCP configuration. + + Args: + scope: Configuration scope (user, project, or local) + + Returns: + 0 on success, 1 on error + """ + try: + # Determine config path based on scope + if scope == "user": + # User scope: ~/.cursor/mcp.json + cursor_config_path = Path.home() / ".cursor" / "mcp.json" + elif scope in ["project", "local"]: + # Project scope: .cursor/mcp.json in current directory + cursor_config_path = Path.cwd() / ".cursor" / "mcp.json" + else: + print( + f"Error: Unsupported scope '{scope}' for Cursor. Supported: user, project, local", + file=sys.stderr, + ) + return 1 + + print( + f"Adding Strata to Cursor with scope '{scope}' at {cursor_config_path}..." + ) + + # Load or create cursor configuration + cursor_config = ensure_json_config(cursor_config_path) + + # Create Strata server configuration for Cursor + strata_server_config = {"command": "strata", "args": []} + + # Update configuration using recursive update + cursor_config = update_json_recursively( + cursor_config, ["mcpServers", "strata"], strata_server_config + ) + + # Save updated configuration + save_json_config(cursor_config_path, cursor_config) + print("āœ“ Successfully added Strata to Cursor MCP configuration") + return 0 + + except (IOError, OSError) as e: + print(f"Error handling Cursor configuration: {e}", file=sys.stderr) + return 1 + + +def add_strata_to_vscode() -> int: + """Add Strata to VSCode MCP configuration. + + Returns: + 0 on success, 1 on error + """ + try: + # VSCode uses JSON format: code --add-mcp '{"name":"strata","command":"strata"}' + mcp_config = {"name": "strata", "command": "strata"} + mcp_json = json.dumps(mcp_config) + + print("Adding Strata to VSCode...") + cmd = ["code", "--add-mcp", mcp_json] + result = subprocess.run(cmd, capture_output=True, text=True) + + if result.returncode == 0: + print("āœ“ Successfully added Strata to VSCode MCP configuration") + if result.stdout.strip(): + print(result.stdout) + return 0 + else: + print( + f"Error adding Strata to VSCode: {result.stderr.strip()}", + file=sys.stderr, + ) + return result.returncode + + except subprocess.SubprocessError as e: + print(f"Error running VSCode command: {e}", file=sys.stderr) + return 1 + + +def add_strata_to_claude_or_gemini(target: str, scope: str = "user") -> int: + """Add Strata to Claude or Gemini MCP configuration. + + Args: + target: Target CLI tool (claude or gemini) + scope: Configuration scope + + Returns: + 0 on success, 1 on error + """ + try: + # Claude and Gemini use the original format + cmd = [target, "mcp", "add"] + cmd.extend(["--scope", scope]) + cmd.extend(["strata", "strata"]) + + print(f"Adding Strata to {target} with scope '{scope}'...") + result = subprocess.run(cmd, capture_output=True, text=True) + + if result.returncode == 0: + print(f"āœ“ Successfully added Strata to {target} MCP configuration") + if result.stdout.strip(): + print(result.stdout) + return 0 + else: + print( + f"Error adding Strata to {target}: {result.stderr.strip()}", + file=sys.stderr, + ) + return result.returncode + + except subprocess.SubprocessError as e: + print(f"Error running {target} command: {e}", file=sys.stderr) + return 1 + + +def add_strata_to_tool(target: str, scope: str = "user") -> int: + """Add Strata MCP server to specified tool configuration. + + Args: + target: Target tool (claude, gemini, vscode, cursor) + scope: Configuration scope + + Returns: + 0 on success, 1 on error + """ + target = target.lower() + + # Validate target + if target not in ["claude", "gemini", "vscode", "cursor"]: + print( + f"Error: Unsupported target '{target}'. Supported targets: claude, gemini, vscode, cursor", + file=sys.stderr, + ) + return 1 + + # VSCode doesn't support scope parameter + if target == "vscode" and scope != "user": + print( + "Warning: VSCode doesn't support scope parameter, using default behavior", + file=sys.stderr, + ) + + # Check if the target CLI is available (skip for cursor as we handle files directly) + if target != "cursor": + if not check_cli_available(target): + cli_name = "code" if target == "vscode" else target + print( + f"Error: {cli_name} CLI not found. Please install {cli_name} CLI first.", + file=sys.stderr, + ) + return 1 + + # Handle each target + if target == "cursor": + return add_strata_to_cursor(scope) + elif target == "vscode": + return add_strata_to_vscode() + else: # claude or gemini + return add_strata_to_claude_or_gemini(target, scope) diff --git a/open-strata/tests/test_cli.py b/open-strata/tests/test_cli.py new file mode 100644 index 00000000..8b1603ee --- /dev/null +++ b/open-strata/tests/test_cli.py @@ -0,0 +1,484 @@ +"""Tests for CLI commands.""" + +import json +import sys +import tempfile +from pathlib import Path +from unittest.mock import patch + +import pytest + +from strata.cli import create_parser +from strata.config import MCPServerList + + +class TestCLICommands: + """Test CLI commands.""" + + @pytest.fixture + def temp_config(self): + """Create a temporary config file.""" + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + config = {"mcp": {"servers": {}}} + json.dump(config, f) + temp_path = Path(f.name) + + yield temp_path + + # Cleanup + temp_path.unlink(missing_ok=True) + + def test_add_sse_server(self, temp_config): + """Test adding an SSE type server.""" + test_args = [ + "strata", + "--config-path", + str(temp_config), + "add", + "--type", + "sse", + "--env", + "API_KEY=test123", + "klavis-ai", + "http://localhost:8080/mcp/", + ] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) # Skip program name + result = args.func(args) + + assert result == 0 + + # Verify the config was saved + with open(temp_config, "r") as f: + config = json.load(f) + + assert "klavis-ai" in config["mcp"]["servers"] + server = config["mcp"]["servers"]["klavis-ai"] + assert server["type"] == "sse" + assert server["url"] == "http://localhost:8080/mcp/" + assert server["env"]["API_KEY"] == "test123" + + def test_add_http_server_with_headers(self, temp_config): + """Test adding an HTTP type server with headers.""" + test_args = [ + "strata", + "--config-path", + str(temp_config), + "add", + "--type", + "http", + "--header", + "Authorization:Bearer token123", + "--header", + "X-Custom-Header:value", + "api-server", + "https://api.example.com/mcp", + ] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 0 + + # Verify the config was saved + with open(temp_config, "r") as f: + config = json.load(f) + + assert "api-server" in config["mcp"]["servers"] + server = config["mcp"]["servers"]["api-server"] + assert server["type"] == "http" + assert server["url"] == "https://api.example.com/mcp" + assert server["headers"]["Authorization"] == "Bearer token123" + assert server["headers"]["X-Custom-Header"] == "value" + + def test_add_stdio_server(self, temp_config): + """Test adding a stdio type server with dash-prefixed arguments.""" + test_args = [ + "strata", + "--config-path", + str(temp_config), + "add", + "--type", + "stdio", + "--env", + "GITHUB_TOKEN=test_token", + "github", + "npx", + "--", # Use -- to separate command arguments + "-y", + "@modelcontextprotocol/server-github", + ] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 0 + + # Verify the config was saved + with open(temp_config, "r") as f: + config = json.load(f) + + assert "github" in config["mcp"]["servers"] + server = config["mcp"]["servers"]["github"] + assert server["command"] == "npx" + assert server["args"] == ["-y", "@modelcontextprotocol/server-github"] + assert server["env"]["GITHUB_TOKEN"] == "test_token" + + def test_add_command_type_normalized_to_stdio(self, temp_config): + """Test that 'command' type is normalized to 'stdio'.""" + test_args = [ + "strata", + "--config-path", + str(temp_config), + "add", + "--type", + "command", + "test-server", + "echo", + "hello", + ] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 0 + + # Verify the config was saved with 'stdio' type + with open(temp_config, "r") as f: + config = json.load(f) + + assert "test-server" in config["mcp"]["servers"] + # The type should be normalized to 'stdio', not 'command' + assert ( + "type" not in config["mcp"]["servers"]["test-server"] + or config["mcp"]["servers"]["test-server"].get("type") == "stdio" + ) + + def test_remove_server(self, temp_config): + """Test removing a server.""" + # First add a server + server_list = MCPServerList(temp_config) + from strata.config import MCPServerConfig + + test_server = MCPServerConfig( + name="test-server", type="stdio", command="echo", args=["hello"] + ) + server_list.add_server(test_server) + server_list.save() + + # Now remove it + test_args = [ + "strata", + "--config-path", + str(temp_config), + "remove", + "test-server", + ] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 0 + + # Verify the server was removed + with open(temp_config, "r") as f: + config = json.load(f) + + assert "test-server" not in config["mcp"]["servers"] + + def test_remove_nonexistent_server(self, temp_config): + """Test removing a server that doesn't exist.""" + test_args = [ + "strata", + "--config-path", + str(temp_config), + "remove", + "nonexistent", + ] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 1 # Should fail + + def test_list_servers(self, temp_config, capsys): + """Test listing servers.""" + # Add some servers first + server_list = MCPServerList(temp_config) + from strata.config import MCPServerConfig + + server1 = MCPServerConfig( + name="server1", type="sse", url="http://example.com/mcp", enabled=True + ) + server2 = MCPServerConfig( + name="server2", type="stdio", command="echo", args=["hello"], enabled=False + ) + server_list.add_server(server1) + server_list.add_server(server2) + server_list.save() + + # List servers + test_args = ["strata", "--config-path", str(temp_config), "list"] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 0 + + captured = capsys.readouterr() + assert "server1 (sse, enabled)" in captured.out + assert "server2 (stdio, disabled)" in captured.out + + def test_enable_server(self, temp_config): + """Test enabling a server.""" + # Add a disabled server + server_list = MCPServerList(temp_config) + from strata.config import MCPServerConfig + + test_server = MCPServerConfig( + name="test-server", + type="stdio", + command="echo", + args=["hello"], + enabled=False, + ) + server_list.add_server(test_server) + server_list.save() + + # Enable it + test_args = [ + "strata", + "--config-path", + str(temp_config), + "enable", + "test-server", + ] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 0 + + # Verify it was enabled + with open(temp_config, "r") as f: + config = json.load(f) + + assert config["mcp"]["servers"]["test-server"]["enabled"] is True + + def test_disable_server(self, temp_config): + """Test disabling a server.""" + # Add an enabled server + server_list = MCPServerList(temp_config) + from strata.config import MCPServerConfig + + test_server = MCPServerConfig( + name="test-server", + type="stdio", + command="echo", + args=["hello"], + enabled=True, + ) + server_list.add_server(test_server) + server_list.save() + + # Disable it + test_args = [ + "strata", + "--config-path", + str(temp_config), + "disable", + "test-server", + ] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 0 + + # Verify it was disabled + with open(temp_config, "r") as f: + config = json.load(f) + + assert config["mcp"]["servers"]["test-server"]["enabled"] is False + + def test_invalid_env_format(self, temp_config, capsys): + """Test invalid environment variable format.""" + test_args = [ + "strata", + "--config-path", + str(temp_config), + "add", + "--type", + "stdio", + "--env", + "INVALID_FORMAT", + "test-server", + "echo", + ] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 1 # Should fail + + captured = capsys.readouterr() + assert "Invalid environment variable format" in captured.err + + def test_invalid_header_format(self, temp_config, capsys): + """Test invalid header format.""" + test_args = [ + "strata", + "--config-path", + str(temp_config), + "add", + "--type", + "http", + "--header", + "INVALID_FORMAT", + "test-server", + "http://example.com/", + ] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 1 # Should fail + + captured = capsys.readouterr() + assert "Invalid header format" in captured.err + + def test_cursor_user_scope(self): + """Test adding Strata to Cursor with user scope.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Mock home directory + with patch("pathlib.Path.home", return_value=Path(temp_dir)): + test_args = ["strata", "tool", "add", "cursor", "--scope", "user"] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 0 + + # Check that the config file was created + cursor_config_path = Path(temp_dir) / ".cursor" / "mcp.json" + assert cursor_config_path.exists() + + # Check the content + with open(cursor_config_path, "r") as f: + config = json.load(f) + + assert "mcpServers" in config + assert "strata" in config["mcpServers"] + assert config["mcpServers"]["strata"]["command"] == "strata" + + def test_cursor_project_scope(self): + """Test adding Strata to Cursor with project scope.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Change to temp directory to simulate project directory + original_cwd = Path.cwd() + try: + import os + + os.chdir(temp_dir) + + test_args = ["strata", "tool", "add", "cursor", "--scope", "project"] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 0 + + # Check that the config file was created + cursor_config_path = Path(temp_dir) / ".cursor" / "mcp.json" + assert cursor_config_path.exists() + + # Check the content + with open(cursor_config_path, "r") as f: + config = json.load(f) + + assert "mcpServers" in config + assert "strata" in config["mcpServers"] + assert config["mcpServers"]["strata"]["command"] == "strata" + + finally: + os.chdir(original_cwd) + + def test_cursor_existing_config(self): + """Test adding Strata to existing Cursor configuration.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Create existing config + cursor_config_path = Path(temp_dir) / ".cursor" / "mcp.json" + cursor_config_path.parent.mkdir(parents=True) + + existing_config = { + "mcpServers": { + "existing-server": {"command": "some-command", "args": ["--flag"]} + }, + "otherConfig": {"someValue": "test"}, + } + + with open(cursor_config_path, "w") as f: + json.dump(existing_config, f) + + # Change to temp directory + original_cwd = Path.cwd() + try: + import os + + os.chdir(temp_dir) + + test_args = ["strata", "tool", "add", "cursor", "--scope", "project"] + + with patch.object(sys, "argv", test_args): + parser = create_parser() + args = parser.parse_args(test_args[1:]) + result = args.func(args) + + assert result == 0 + + # Check that existing config is preserved + with open(cursor_config_path, "r") as f: + config = json.load(f) + + # Existing server should be preserved + assert "existing-server" in config["mcpServers"] + assert ( + config["mcpServers"]["existing-server"]["command"] == "some-command" + ) + + # Other config should be preserved + assert "otherConfig" in config + assert config["otherConfig"]["someValue"] == "test" + + # Strata should be added + assert "strata" in config["mcpServers"] + assert config["mcpServers"]["strata"]["command"] == "strata" + + finally: + os.chdir(original_cwd) diff --git a/open-strata/tests/test_config.py b/open-strata/tests/test_config.py new file mode 100644 index 00000000..48390c36 --- /dev/null +++ b/open-strata/tests/test_config.py @@ -0,0 +1,452 @@ +"""Test cases for MCPServerList configuration parsing.""" + +import json +import tempfile +from pathlib import Path + +from strata.config import MCPServerConfig, MCPServerList + + +class TestMCPServerList: + """Test MCPServerList functionality.""" + + def test_parse_mcp_config_format(self): + """Test parsing MCP config format with servers section.""" + # Create a sample MCP config + mcp_config = { + "mcp": { + "servers": { + "github": { + "command": "docker", + "args": [ + "run", + "-i", + "--rm", + "-e", + "GITHUB_PERSONAL_ACCESS_TOKEN", + "ghcr.io/github/github-mcp-server", + ], + "env": { + "GITHUB_PERSONAL_ACCESS_TOKEN": "${input:github_token}" + }, + }, + "filesystem": { + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-filesystem"], + }, + } + } + } + + # Create temporary config file + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + json.dump(mcp_config, f) + config_path = Path(f.name) + + try: + # Create MCPServerList instance + server_list = MCPServerList(config_path) + + # Verify servers were loaded correctly + assert len(server_list.servers) == 2 + + # Check GitHub server + github_server = server_list.get_server("github") + assert github_server is not None + assert github_server.name == "github" + assert github_server.command == "docker" + assert github_server.args == [ + "run", + "-i", + "--rm", + "-e", + "GITHUB_PERSONAL_ACCESS_TOKEN", + "ghcr.io/github/github-mcp-server", + ] + assert github_server.env == { + "GITHUB_PERSONAL_ACCESS_TOKEN": "${input:github_token}" + } + + # Check filesystem server + fs_server = server_list.get_server("filesystem") + assert fs_server is not None + assert fs_server.name == "filesystem" + assert fs_server.command == "npx" + assert fs_server.args == ["-y", "@modelcontextprotocol/server-filesystem"] + # Description field was removed from MCPServerConfig + + finally: + # Clean up + config_path.unlink() + + def test_parse_legacy_format(self): + """Test parsing legacy servers format (current implementation).""" + legacy_config = { + "servers": { + "test-server": { + "name": "test-server", + "command": "python", + "args": ["-m", "test"], + "env": {"TEST_VAR": "value"}, + "enabled": True, + } + } + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + json.dump(legacy_config, f) + config_path = Path(f.name) + + try: + server_list = MCPServerList(config_path) + + assert len(server_list.servers) == 1 + server = server_list.get_server("test-server") + assert server is not None + assert server.name == "test-server" + assert server.command == "python" + assert server.args == ["-m", "test"] + assert server.env == {"TEST_VAR": "value"} + assert server.enabled is True + + finally: + config_path.unlink() + + def test_add_server_override(self): + """Test that adding a server with existing name overrides it.""" + with tempfile.TemporaryDirectory() as tmpdir: + config_path = Path(tmpdir) / "test_config.json" + server_list = MCPServerList(config_path) + + # Add initial server + server1 = MCPServerConfig( + name="test", + command="python", + args=["-m", "test1"], + ) + result1 = server_list.add_server(server1) + + # Verify it was added + assert result1 is True + assert len(server_list.servers) == 1 + assert server_list.get_server("test").command == "python" + + # Add server with same name (should override) + server2 = MCPServerConfig( + name="test", + command="node", + args=["test2.js"], + ) + result2 = server_list.add_server(server2) + + # Verify it was overridden + assert result2 is True + assert len(server_list.servers) == 1 + assert server_list.get_server("test").command == "node" + assert server_list.get_server("test").args == ["test2.js"] + + def test_add_server_returns_false_for_identical(self): + """Test that add_server returns False when configuration is identical.""" + with tempfile.TemporaryDirectory() as tmpdir: + config_path = Path(tmpdir) / "test_config.json" + server_list = MCPServerList(config_path) + + # Create a server configuration + server = MCPServerConfig( + name="test-server", + command="python", + args=["-m", "test"], + env={"KEY": "value"}, + enabled=True, + ) + + # First add should return True + result1 = server_list.add_server(server) + assert result1 is True + assert "test-server" in server_list.servers + + # Adding identical server should return False + identical_server = MCPServerConfig( + name="test-server", + command="python", + args=["-m", "test"], + env={"KEY": "value"}, + enabled=True, + ) + result2 = server_list.add_server(identical_server) + assert result2 is False + + def test_add_server_returns_true_for_different_config(self): + """Test that add_server returns True when configuration is different.""" + with tempfile.TemporaryDirectory() as tmpdir: + config_path = Path(tmpdir) / "test_config.json" + server_list = MCPServerList(config_path) + + # Create initial server + server1 = MCPServerConfig( + name="test-server", + command="python", + args=["-m", "test"], + env={"KEY": "value"}, + enabled=True, + ) + + # First add + result1 = server_list.add_server(server1) + assert result1 is True + + # Modified server with same name but different config + server2 = MCPServerConfig( + name="test-server", + command="node", # Different command + args=["-m", "test"], + env={"KEY": "value"}, + enabled=True, + ) + result2 = server_list.add_server(server2) + assert result2 is True + assert server_list.get_server("test-server").command == "node" + + def test_add_server_detects_small_differences(self): + """Test that add_server detects small differences in configuration.""" + with tempfile.TemporaryDirectory() as tmpdir: + config_path = Path(tmpdir) / "test_config.json" + server_list = MCPServerList(config_path) + + base_server = MCPServerConfig( + name="test", + command="python", + args=["arg1", "arg2"], + env={"KEY1": "value1"}, + enabled=True, + ) + + # Add initial server + assert server_list.add_server(base_server) is True + + # Test different args + server_diff_args = MCPServerConfig( + name="test", + command="python", + args=["arg1", "arg3"], # Different arg + env={"KEY1": "value1"}, + enabled=True, + ) + assert server_list.add_server(server_diff_args) is True + + # Test different env + server_diff_env = MCPServerConfig( + name="test", + command="python", + args=["arg1", "arg3"], + env={"KEY1": "value2"}, # Different value + enabled=True, + ) + assert server_list.add_server(server_diff_env) is True + + # Test different enabled status + server_diff_enabled = MCPServerConfig( + name="test", + command="python", + args=["arg1", "arg3"], + env={"KEY1": "value2"}, + enabled=False, # Different enabled status + ) + assert server_list.add_server(server_diff_enabled) is True + + # Test identical to last one + server_identical = MCPServerConfig( + name="test", + command="python", + args=["arg1", "arg3"], + env={"KEY1": "value2"}, + enabled=False, + ) + assert server_list.add_server(server_identical) is False + + def test_add_new_server_always_returns_true(self): + """Test that adding a new server always returns True.""" + with tempfile.TemporaryDirectory() as tmpdir: + config_path = Path(tmpdir) / "test_config.json" + server_list = MCPServerList(config_path) + + server1 = MCPServerConfig(name="server1", command="cmd1") + server2 = MCPServerConfig(name="server2", command="cmd2") + + assert server_list.add_server(server1) is True + assert server_list.add_server(server2) is True + assert len(server_list.servers) == 2 + + def test_empty_config_file(self): + """Test handling of empty config file.""" + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + json.dump({}, f) + config_path = Path(f.name) + + try: + server_list = MCPServerList(config_path) + assert len(server_list.servers) == 0 + finally: + config_path.unlink() + + def test_nonexistent_config_file(self): + """Test handling when config file doesn't exist.""" + config_path = Path("/tmp/nonexistent_config_12345.json") + server_list = MCPServerList(config_path) + + # Should start with empty servers (no defaults) + assert len(server_list.servers) == 0 + + def test_save_mcp_format(self): + """Test saving servers in MCP format.""" + with tempfile.TemporaryDirectory() as tmpdir: + config_path = Path(tmpdir) / "test_config.json" + + # Create and save servers in MCP format + server_list = MCPServerList(config_path, use_mcp_format=True) + server = MCPServerConfig( + name="test", + command="python", + args=["-m", "test"], + env={"KEY": "value"}, + ) + server_list.add_server(server) + + # Load the saved file and verify format + with open(config_path, "r") as f: + saved_data = json.load(f) + + # Should save in MCP format + assert "mcp" in saved_data + assert "servers" in saved_data["mcp"] + assert "test" in saved_data["mcp"]["servers"] + + # Verify the saved data structure + test_server = saved_data["mcp"]["servers"]["test"] + assert test_server["command"] == "python" + assert test_server["args"] == ["-m", "test"] + assert test_server["env"] == {"KEY": "value"} + assert test_server["enabled"] is True + + def test_save_legacy_format(self): + """Test saving servers in legacy format.""" + with tempfile.TemporaryDirectory() as tmpdir: + config_path = Path(tmpdir) / "test_config.json" + + # Create and save servers in legacy format + server_list = MCPServerList(config_path, use_mcp_format=False) + server = MCPServerConfig( + name="test", + command="python", + args=["-m", "test"], + env={"KEY": "value"}, + ) + server_list.add_server(server) + + # Load the saved file and verify format + with open(config_path, "r") as f: + saved_data = json.load(f) + + # Should save in legacy format + assert "servers" in saved_data + assert "test" in saved_data["servers"] + assert saved_data["servers"]["test"]["name"] == "test" + + def test_mixed_env_variables(self): + """Test handling of environment variables with template syntax.""" + config = { + "mcp": { + "servers": { + "mixed-env": { + "command": "test", + "args": [], + "env": { + "NORMAL_VAR": "plain_value", + "TEMPLATE_VAR": "${input:some_token}", + "MIXED_VAR": "prefix_${input:token}_suffix", + }, + } + } + } + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + json.dump(config, f) + config_path = Path(f.name) + + try: + server_list = MCPServerList(config_path) + server = server_list.get_server("mixed-env") + + assert server is not None + assert server.env["NORMAL_VAR"] == "plain_value" + assert server.env["TEMPLATE_VAR"] == "${input:some_token}" + assert server.env["MIXED_VAR"] == "prefix_${input:token}_suffix" + + finally: + config_path.unlink() + + def test_mcp_format_round_trip(self): + """Test saving and loading servers in MCP format preserves data.""" + with tempfile.TemporaryDirectory() as tmpdir: + config_path = Path(tmpdir) / "test_config.json" + + # Create servers with various configurations + server_list1 = MCPServerList(config_path, use_mcp_format=True) + + server1 = MCPServerConfig( + name="github", + command="docker", + args=["run", "-i", "--rm", "ghcr.io/github/github-mcp-server"], + env={"GITHUB_TOKEN": "${input:github_token}"}, + ) + server_list1.add_server(server1) + + server2 = MCPServerConfig( + name="filesystem", + command="npx", + args=["-y", "@modelcontextprotocol/server-filesystem"], + ) + server_list1.add_server(server2) + + server3 = MCPServerConfig( + name="disabled-server", + command="test", + args=["arg1", "arg2"], + enabled=False, + ) + server_list1.add_server(server3) + + # Load from the same file + server_list2 = MCPServerList(config_path) + + # Verify all servers were loaded correctly + assert len(server_list2.servers) == 3 + + # Check GitHub server + github = server_list2.get_server("github") + assert github.command == "docker" + assert github.args == [ + "run", + "-i", + "--rm", + "ghcr.io/github/github-mcp-server", + ] + assert github.env == {"GITHUB_TOKEN": "${input:github_token}"} + assert github.name == "github" + assert github.enabled is True + + # Check filesystem server + fs = server_list2.get_server("filesystem") + assert fs.command == "npx" + assert fs.args == ["-y", "@modelcontextprotocol/server-filesystem"] + assert fs.name == "filesystem" + assert fs.env == {} + assert fs.enabled is True + + # Check disabled server + disabled = server_list2.get_server("disabled-server") + assert disabled.command == "test" + assert disabled.args == ["arg1", "arg2"] + assert disabled.enabled is False + assert disabled.enabled is False diff --git a/open-strata/tests/test_config_sync_integration.py b/open-strata/tests/test_config_sync_integration.py new file mode 100644 index 00000000..d045c9b3 --- /dev/null +++ b/open-strata/tests/test_config_sync_integration.py @@ -0,0 +1,218 @@ +"""Integration test for config watching and client manager syncing.""" + +import asyncio +import json +import tempfile +from pathlib import Path +from unittest.mock import AsyncMock, patch + +import pytest + +from strata.mcp_client_manager import MCPClientManager + + +class TestConfigSyncIntegration: + """Integration tests for config watching + client manager sync.""" + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_config_change_triggers_sync(self, mock_transport, mock_client): + """Test that config file changes trigger client manager sync.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client_instance.connect = AsyncMock() + mock_client_instance.disconnect = AsyncMock() + mock_client.return_value = mock_client_instance + + # Create temporary config with initial server + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + initial_config = { + "mcp": { + "servers": { + "initial-server": { + "command": "echo", + "args": ["initial"], + "enabled": True, + } + } + } + } + json.dump(initial_config, f) + config_path = Path(f.name) + + try: + # Create manager and initialize + manager = MCPClientManager(config_path) + await manager.initialize_from_config() + + # Verify initial server is connected + assert "initial-server" in manager.active_clients + assert mock_client_instance.connect.call_count == 1 + + # Track sync calls + sync_calls = [] + original_sync = manager.sync_with_config + + async def track_sync(new_servers): + sync_calls.append(dict(new_servers)) + return await original_sync(new_servers) + + manager.sync_with_config = track_sync + + # Start config watching + watch_task = asyncio.create_task( + manager.server_list.watch_config( + lambda servers: asyncio.create_task( + manager.sync_with_config(servers) + ) + ) + ) + + # Give watcher time to start + await asyncio.sleep(0.1) + + # Update config file to add new server and remove old one + new_config = { + "mcp": { + "servers": { + "new-server": { + "command": "node", + "args": ["server.js"], + "enabled": True, + } + } + } + } + + with open(config_path, "w") as f: + json.dump(new_config, f) + + # Wait for config change to be detected and processed + await asyncio.sleep(0.6) + + # Verify sync was called + assert len(sync_calls) >= 1 + + # Check that the new config has the correct server + last_sync = sync_calls[-1] + assert "new-server" in last_sync + assert "initial-server" not in last_sync + + # Verify client manager state reflects the change + assert "new-server" in manager.active_clients + assert "initial-server" not in manager.active_clients + + # Should have disconnected old and connected new + assert mock_client_instance.disconnect.call_count >= 1 + assert mock_client_instance.connect.call_count >= 2 + + watch_task.cancel() + + finally: + # Cleanup + config_path.unlink(missing_ok=True) + await manager.disconnect_all() + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_config_enable_disable_triggers_sync( + self, mock_transport, mock_client + ): + """Test that enabling/disabling servers triggers appropriate sync actions.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client_instance.connect = AsyncMock() + mock_client_instance.disconnect = AsyncMock() + mock_client.return_value = mock_client_instance + + # Create temporary config with disabled server + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + initial_config = { + "mcp": { + "servers": { + "test-server": { + "command": "echo", + "args": ["test"], + "enabled": False, # Disabled initially + } + } + } + } + json.dump(initial_config, f) + config_path = Path(f.name) + + try: + # Create manager and initialize + manager = MCPClientManager(config_path) + await manager.initialize_from_config() + + # Verify no servers are connected (server is disabled) + assert len(manager.active_clients) == 0 + assert mock_client_instance.connect.call_count == 0 + + # Start config watching + watch_task = asyncio.create_task( + manager.server_list.watch_config( + lambda servers: asyncio.create_task( + manager.sync_with_config(servers) + ) + ) + ) + + # Give watcher time to start + await asyncio.sleep(0.1) + + # Update config to enable the server + enabled_config = { + "mcp": { + "servers": { + "test-server": { + "command": "echo", + "args": ["test"], + "enabled": True, # Now enabled + } + } + } + } + + with open(config_path, "w") as f: + json.dump(enabled_config, f) + + # Wait for config change to be detected and processed + await asyncio.sleep(0.6) + + # Verify server is now connected + assert "test-server" in manager.active_clients + assert mock_client_instance.connect.call_count == 1 + + # Now disable the server again + disabled_config = { + "mcp": { + "servers": { + "test-server": { + "command": "echo", + "args": ["test"], + "enabled": False, # Disabled again + } + } + } + } + + with open(config_path, "w") as f: + json.dump(disabled_config, f) + + # Wait for config change to be detected and processed + await asyncio.sleep(0.6) + + # Verify server is now disconnected + assert "test-server" not in manager.active_clients + assert mock_client_instance.disconnect.call_count == 1 + + watch_task.cancel() + + finally: + # Cleanup + config_path.unlink(missing_ok=True) + await manager.disconnect_all() diff --git a/open-strata/tests/test_config_watch.py b/open-strata/tests/test_config_watch.py new file mode 100644 index 00000000..28de00cd --- /dev/null +++ b/open-strata/tests/test_config_watch.py @@ -0,0 +1,300 @@ +"""Tests for config watching functionality.""" + +import asyncio +import json +import tempfile +from pathlib import Path + +import pytest + +from strata.config import MCPServerList + + +class TestConfigWatch: + """Test config file watching functionality.""" + + @pytest.fixture + def temp_config(self): + """Create a temporary config file.""" + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + config = {"mcp": {"servers": {}}} + json.dump(config, f) + temp_path = Path(f.name) + + yield temp_path + + # Cleanup + temp_path.unlink(missing_ok=True) + + @pytest.mark.asyncio + async def test_watch_config_add_server(self, temp_config): + """Test that adding a server triggers the callback.""" + server_list = MCPServerList(temp_config) + + # Track callback calls + callback_calls = [] + + def on_changed(servers): + callback_calls.append(servers.copy()) + + # Start watching in background + watch_task = asyncio.create_task(server_list.watch_config(on_changed)) + + try: + # Give watcher time to start + await asyncio.sleep(0.1) + + # Add a server by modifying the file directly + new_config = { + "mcp": { + "servers": { + "test-server": { + "command": "echo", + "args": ["hello"], + "enabled": True, + } + } + } + } + + with open(temp_config, "w") as f: + json.dump(new_config, f) + + # Wait for change to be detected + await asyncio.sleep(0.5) + + # Check callback was called + assert len(callback_calls) == 1 + assert "test-server" in callback_calls[0] + assert callback_calls[0]["test-server"].name == "test-server" + assert callback_calls[0]["test-server"].command == "echo" + + finally: + watch_task.cancel() + try: + await watch_task + except asyncio.CancelledError: + pass + + @pytest.mark.asyncio + async def test_watch_config_remove_server(self, temp_config): + """Test that removing a server triggers the callback.""" + # Start with a server + initial_config = { + "mcp": { + "servers": { + "test-server": { + "command": "echo", + "args": ["hello"], + "enabled": True, + } + } + } + } + with open(temp_config, "w") as f: + json.dump(initial_config, f) + + server_list = MCPServerList(temp_config) + + # Track callback calls + callback_calls = [] + + def on_changed(servers): + callback_calls.append(servers.copy()) + + # Start watching in background + watch_task = asyncio.create_task(server_list.watch_config(on_changed)) + + try: + # Give watcher time to start + await asyncio.sleep(0.1) + + # Remove the server + new_config = {"mcp": {"servers": {}}} + + with open(temp_config, "w") as f: + json.dump(new_config, f) + + # Wait for change to be detected + await asyncio.sleep(0.5) + + # Check callback was called with empty servers + assert len(callback_calls) == 1 + assert len(callback_calls[0]) == 0 + + finally: + watch_task.cancel() + try: + await watch_task + except asyncio.CancelledError: + pass + + @pytest.mark.asyncio + async def test_watch_config_enable_disable(self, temp_config): + """Test that enabling/disabling a server triggers the callback.""" + # Start with a disabled server + initial_config = { + "mcp": { + "servers": { + "test-server": { + "command": "echo", + "args": ["hello"], + "enabled": False, + } + } + } + } + with open(temp_config, "w") as f: + json.dump(initial_config, f) + + server_list = MCPServerList(temp_config) + + # Track callback calls + callback_calls = [] + + def on_changed(servers): + callback_calls.append(servers.copy()) + + # Start watching in background + watch_task = asyncio.create_task(server_list.watch_config(on_changed)) + + try: + # Give watcher time to start + await asyncio.sleep(0.1) + + # Enable the server + new_config = { + "mcp": { + "servers": { + "test-server": { + "command": "echo", + "args": ["hello"], + "enabled": True, + } + } + } + } + + with open(temp_config, "w") as f: + json.dump(new_config, f) + + # Wait for change to be detected + await asyncio.sleep(0.5) + + # Check callback was called with enabled server + assert len(callback_calls) == 1 + assert callback_calls[0]["test-server"].enabled is True + + # Now disable it + new_config["mcp"]["servers"]["test-server"]["enabled"] = False + + with open(temp_config, "w") as f: + json.dump(new_config, f) + + # Wait for change to be detected + await asyncio.sleep(0.5) + + # Check callback was called again with disabled server + assert len(callback_calls) == 2 + assert callback_calls[1]["test-server"].enabled is False + + finally: + watch_task.cancel() + try: + await watch_task + except asyncio.CancelledError: + pass + + @pytest.mark.asyncio + async def test_watch_config_multiple_changes(self, temp_config): + """Test that multiple rapid changes all trigger callbacks.""" + server_list = MCPServerList(temp_config) + + # Track callback calls + callback_calls = [] + + def on_changed(servers): + callback_calls.append(servers.copy()) + + # Start watching in background + watch_task = asyncio.create_task(server_list.watch_config(on_changed)) + + try: + # Give watcher time to start + await asyncio.sleep(0.1) + + # Make multiple changes + for i in range(3): + new_config = { + "mcp": { + "servers": { + f"server-{i}": { + "command": "echo", + "args": [f"hello-{i}"], + "enabled": True, + } + } + } + } + + with open(temp_config, "w") as f: + json.dump(new_config, f) + + # Small delay between changes + await asyncio.sleep(0.3) + + # Wait a bit more for all changes to be processed + await asyncio.sleep(0.5) + + # Check at least some changes were detected (watchgod may batch changes) + assert len(callback_calls) >= 1 + + # Check the last state has server-2 (the final state) + last_servers = callback_calls[-1] + assert "server-2" in last_servers + assert last_servers["server-2"].args == ["hello-2"] + + finally: + watch_task.cancel() + try: + await watch_task + except asyncio.CancelledError: + pass + + @pytest.mark.asyncio + async def test_watch_config_ignores_other_files(self, temp_config): + """Test that changes to other files in the directory are ignored.""" + server_list = MCPServerList(temp_config) + + # Track callback calls + callback_calls = [] + + def on_changed(servers): + callback_calls.append(servers.copy()) + + # Start watching in background + watch_task = asyncio.create_task(server_list.watch_config(on_changed)) + + try: + # Give watcher time to start + await asyncio.sleep(0.1) + + # Create another file in the same directory + other_file = temp_config.parent / "other.txt" + other_file.write_text("some content") + + # Wait to see if it triggers a callback + await asyncio.sleep(0.5) + + # Should not have triggered any callbacks + assert len(callback_calls) == 0 + + # Clean up the other file + other_file.unlink() + + finally: + watch_task.cancel() + try: + await watch_task + except asyncio.CancelledError: + pass diff --git a/open-strata/tests/test_mcp_client.py b/open-strata/tests/test_mcp_client.py new file mode 100644 index 00000000..d331a329 --- /dev/null +++ b/open-strata/tests/test_mcp_client.py @@ -0,0 +1,298 @@ +"""Test MCP Client with different transport methods using pytest.""" + +import json +import os +import shutil + +import pytest +import pytest_asyncio + +from strata.mcp_proxy import MCPClient +from strata.mcp_proxy.transport import HTTPTransport, StdioTransport + +# GitHub PAT from environment variable +GITHUB_PAT = os.getenv("GITHUB_PAT") or "" +if not GITHUB_PAT: + raise ValueError( + "GITHUB_PAT environment variable is required. " + "Set it with: export GITHUB_PAT='your_github_personal_access_token'" + ) + + +def __get_container_runtime(): + """Detect available container runtime.""" + if shutil.which("podman"): + return "podman" + elif shutil.which("docker"): + return "docker" + else: + raise RuntimeError("Neither podman nor docker found in PATH") + + +# Detect container runtime (podman or docker) +CONTAINER_RUNTIME = __get_container_runtime() +print(f"Using container runtime: {CONTAINER_RUNTIME}") + + +@pytest_asyncio.fixture +async def http_client(): + """Create HTTP client fixture.""" + transport = HTTPTransport( + url="https://api.githubcopilot.com/mcp/", + mode="http", + headers={"Authorization": f"Bearer {GITHUB_PAT}"}, + ) + client = MCPClient(transport) + await client.connect() + try: + yield client + finally: + if client.is_connected(): + await client.disconnect() + + +@pytest_asyncio.fixture +async def stdio_client(): + """Create stdio client fixture using available container runtime.""" + transport = StdioTransport( + command=CONTAINER_RUNTIME, + args=[ + "run", + "-i", + "--rm", + "-e", + "GITHUB_PERSONAL_ACCESS_TOKEN", + "ghcr.io/github/github-mcp-server", + ], + env={"GITHUB_PERSONAL_ACCESS_TOKEN": GITHUB_PAT}, + ) + client = MCPClient(transport) + await client.connect() + try: + yield client + finally: + if client.is_connected(): + await client.disconnect() + + +class TestHTTPTransport: + """Test HTTP transport functionality.""" + + @pytest.mark.asyncio + async def test_connection(self, http_client): + """Test HTTP connection to GitHub MCP server.""" + assert http_client.is_connected() + print("āœ“ Connected via HTTP") + + @pytest.mark.asyncio + async def test_list_tools(self, http_client): + """Test listing tools via HTTP.""" + tools = await http_client.list_tools() + assert len(tools) > 0 + print(f"āœ“ Found {len(tools)} tools via HTTP") + + # Print sample tools + for i, tool in enumerate(tools[:3]): + print(f" Tool {i+1}: {tool['name']}") + + @pytest.mark.asyncio + async def test_tool_call_search_code(self, http_client): + """Test calling search_code tool via HTTP.""" + # Search for a simple query in the github/docs repo + result = await http_client.call_tool( + "search_code", {"query": "README repo:github/docs", "perPage": 3} + ) + + assert result is not None + + # Parse result + if hasattr(result, "content") and result.content: + content = result.content[0] + if hasattr(content, "text"): + data = json.loads(content.text) + assert "items" in data or "total_count" in data + print(f"āœ“ Search returned {data.get('total_count', 0)} results") + if "items" in data and data["items"]: + print(f" First result: {data['items'][0].get('path', 'N/A')}") + + @pytest.mark.asyncio + async def test_tool_call_get_me(self, http_client): + """Test calling get_me tool via HTTP.""" + result = await http_client.call_tool("get_me", {}) + + assert result is not None + + # Parse result + if hasattr(result, "content") and result.content: + content = result.content[0] + if hasattr(content, "text"): + data = json.loads(content.text) + assert "login" in data or "id" in data + print(f"āœ“ Got user: {data.get('login', data.get('id'))}") + print(f" Name: {data.get('name', 'N/A')}") + print(f" Public repos: {data.get('public_repos', 'N/A')}") + + +class TestStdioTransport: + """Test stdio transport functionality.""" + + @pytest.mark.asyncio + async def test_connection(self, stdio_client): + """Test stdio connection via container runtime.""" + assert stdio_client.is_connected() + print(f"āœ“ Connected via stdio/{CONTAINER_RUNTIME}") + + @pytest.mark.asyncio + async def test_list_tools(self, stdio_client): + """Test listing tools via stdio.""" + tools = await stdio_client.list_tools() + assert len(tools) > 0 + print(f"āœ“ Found {len(tools)} tools via stdio") + + # Print sample tools + for i, tool in enumerate(tools[:3]): + print(f" Tool {i+1}: {tool['name']}") + + @pytest.mark.asyncio + async def test_tool_call_search_code(self, stdio_client): + """Test calling search_code tool via stdio.""" + # Search for a simple query + result = await stdio_client.call_tool( + "search_code", {"query": "README repo:github/docs", "perPage": 3} + ) + + assert result is not None + + # Parse result + if hasattr(result, "content") and result.content: + content = result.content[0] + if hasattr(content, "text"): + data = json.loads(content.text) + assert "items" in data or "total_count" in data + print(f"āœ“ Search returned {data.get('total_count', 0)} results") + if "items" in data and data["items"]: + print(f" First result: {data['items'][0].get('path', 'N/A')}") + + @pytest.mark.asyncio + async def test_tool_call_get_me(self, stdio_client): + """Test calling get_me tool via stdio.""" + result = await stdio_client.call_tool("get_me", {}) + + assert result is not None + + # Parse result + if hasattr(result, "content") and result.content: + content = result.content[0] + if hasattr(content, "text"): + data = json.loads(content.text) + assert "login" in data or "id" in data + print(f"āœ“ Got user: {data.get('login', data.get('id'))}") + print(f" Name: {data.get('name', 'N/A')}") + print(f" Public repos: {data.get('public_repos', 'N/A')}") + + +class TestContextManager: + """Test context manager functionality.""" + + @pytest.mark.asyncio + async def test_http_context_manager(self): + """Test HTTP client as context manager.""" + transport = HTTPTransport( + url="https://api.githubcopilot.com/mcp/", + mode="http", + headers={"Authorization": f"Bearer {GITHUB_PAT}"}, + ) + + async with MCPClient(transport) as client: + assert client.is_connected() + tools = await client.list_tools() + assert len(tools) > 0 + print(f"āœ“ Context manager: Found {len(tools)} tools") + + # Should be disconnected after exiting context + assert not client.is_connected() + print("āœ“ Context manager: Auto-disconnected") + + @pytest.mark.asyncio + async def test_stdio_context_manager(self): + """Test stdio client as context manager.""" + transport = StdioTransport( + command=CONTAINER_RUNTIME, + args=[ + "run", + "-i", + "--rm", + "-e", + "GITHUB_PERSONAL_ACCESS_TOKEN", + "ghcr.io/github/github-mcp-server", + ], + env={"GITHUB_PERSONAL_ACCESS_TOKEN": GITHUB_PAT}, + ) + + async with MCPClient(transport) as client: + assert client.is_connected() + tools = await client.list_tools() + assert len(tools) > 0 + print(f"āœ“ Context manager: Found {len(tools)} tools") + + # Should be disconnected after exiting context + assert not client.is_connected() + print("āœ“ Context manager: Auto-disconnected") + + +class TestToolCaching: + """Test tool caching functionality.""" + + @pytest.mark.asyncio + async def test_tool_cache(self, http_client): + """Test that tools are cached after first retrieval.""" + # First call - should fetch from server + tools1 = await http_client.list_tools(use_cache=False) + assert len(tools1) > 0 + + # Second call with cache - should return same tools + tools2 = await http_client.list_tools(use_cache=True) + assert tools1 == tools2 + print("āœ“ Tool caching works correctly") + + # Force refresh without cache + tools3 = await http_client.list_tools(use_cache=False) + assert len(tools3) == len(tools1) + print("āœ“ Cache bypass works correctly") + + +class TestErrorHandling: + """Test error handling.""" + + @pytest.mark.asyncio + async def test_invalid_tool_call(self, http_client): + """Test handling of invalid tool calls.""" + with pytest.raises(Exception) as exc_info: + await http_client.call_tool("non_existent_tool", {"param": "value"}) + + assert ( + "not found" in str(exc_info.value).lower() + or "error" in str(exc_info.value).lower() + ) + print("āœ“ Invalid tool call handled correctly") + + @pytest.mark.asyncio + @staticmethod + async def test_disconnected_client(): + """Test operations on disconnected client.""" + transport = HTTPTransport( + url="https://api.githubcopilot.com/mcp/", + mode="http", + headers={"Authorization": f"Bearer {GITHUB_PAT}"}, + ) + client = MCPClient(transport) + + # Should not be connected initially + assert not client.is_connected() + + # Operations should fail + with pytest.raises(RuntimeError) as exc_info: + await client.list_tools() + + assert "not connected" in str(exc_info.value).lower() + print("āœ“ Disconnected client errors handled correctly") diff --git a/open-strata/tests/test_mcp_client_manager.py b/open-strata/tests/test_mcp_client_manager.py new file mode 100644 index 00000000..99691921 --- /dev/null +++ b/open-strata/tests/test_mcp_client_manager.py @@ -0,0 +1,376 @@ +"""Test cases for MCP Client Manager.""" + +import json +import os +import shutil +import tempfile +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +import pytest_asyncio + +from strata.mcp_client_manager import MCPClientManager + + +def get_container_runtime(): + """Detect available container runtime.""" + if shutil.which("podman"): + return "podman" + elif shutil.which("docker"): + return "docker" + else: + pytest.skip("Neither podman nor docker found in PATH") + + +@pytest_asyncio.fixture +async def temp_config(): + """Create a temporary config file with test servers.""" + config = { + "mcp": { + "servers": { + "test-server-1": { + "command": "python", + "args": ["-m", "test_server"], + "env": {"TEST_VAR": "value1"}, + "enabled": True, + }, + "test-server-2": { + "command": "node", + "args": ["server.js"], + "env": {"NODE_ENV": "test"}, + "enabled": False, + }, + } + } + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + json.dump(config, f) + config_path = Path(f.name) + + yield config_path + + # Cleanup + config_path.unlink() + + +@pytest_asyncio.fixture +async def manager_with_config(temp_config): + """Create a manager with test configuration.""" + manager = MCPClientManager(temp_config) + yield manager + # Cleanup - disconnect all + await manager.disconnect_all() + + +@pytest_asyncio.fixture +async def manager_with_github_server(): + """Create a manager with real GitHub MCP server configuration.""" + # Get GitHub PAT from environment + github_pat = os.getenv("GITHUB_PAT", "") + if not github_pat: + pytest.skip("GITHUB_PAT environment variable not set") + + container_runtime = get_container_runtime() + + # Create temporary config with real GitHub MCP server + config = { + "mcp": { + "servers": { + "github": { + "command": container_runtime, + "args": [ + "run", + "-i", + "--rm", + "-e", + "GITHUB_PERSONAL_ACCESS_TOKEN", + "ghcr.io/github/github-mcp-server", + ], + "env": {"GITHUB_PERSONAL_ACCESS_TOKEN": github_pat}, + "enabled": True, + }, + "disabled-server": { + "command": "echo", + "args": ["test"], + "enabled": False, + }, + } + } + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + json.dump(config, f) + config_path = Path(f.name) + + manager = MCPClientManager(config_path) + + yield manager + + # Cleanup + await manager.disconnect_all() + config_path.unlink() + + +class TestMCPClientManager: + """Test cases for MCPClientManager.""" + + @pytest.mark.asyncio + async def test_initialization(self, temp_config): + """Test manager initialization.""" + manager = MCPClientManager(temp_config) + + # Check that servers are loaded + assert len(manager.server_list.servers) == 2 + assert "test-server-1" in manager.server_list.servers + assert "test-server-2" in manager.server_list.servers + + # No active clients initially + assert len(manager.active_clients) == 0 + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_initialize_from_config( + self, mock_transport, mock_client, manager_with_config + ): + """Test initializing clients from configuration.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client.return_value = mock_client_instance + + # Initialize from config (only enabled servers) + results = await manager_with_config.initialize_from_config() + + # Only test-server-1 should be initialized (it's enabled) + assert results == {"test-server-1": True} + assert len(manager_with_config.active_clients) == 1 + assert "test-server-1" in manager_with_config.active_clients + + # Verify transport was created with correct params + mock_transport.assert_called_once_with( + command="python", args=["-m", "test_server"], env={"TEST_VAR": "value1"} + ) + + # Verify client was connected + mock_client_instance.connect.assert_called_once() + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_disconnect_all( + self, mock_transport, mock_client, manager_with_config + ): + """Test disconnecting from all servers.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client.return_value = mock_client_instance + + # Initialize first + await manager_with_config.initialize_from_config() + assert len(manager_with_config.active_clients) == 1 + + # Disconnect all + await manager_with_config.disconnect_all() + + assert len(manager_with_config.active_clients) == 0 + # Verify disconnect was called + mock_client_instance.disconnect.assert_called_once() + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_list_active_servers( + self, mock_transport, mock_client, manager_with_config + ): + """Test listing active servers.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client.return_value = mock_client_instance + + # Initially no active servers + assert manager_with_config.list_active_servers() == [] + + # Initialize from config + await manager_with_config.initialize_from_config() + + # Should have one active server (test-server-1) + active_servers = manager_with_config.list_active_servers() + assert len(active_servers) == 1 + assert "test-server-1" in active_servers + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_is_connected(self, mock_transport, mock_client, manager_with_config): + """Test checking if a server is connected.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client_instance.is_connected = MagicMock(return_value=True) + mock_client.return_value = mock_client_instance + + # Initially not connected + assert not manager_with_config.is_connected("test-server-1") + + # Initialize from config + await manager_with_config.initialize_from_config() + + # Now should be connected + assert manager_with_config.is_connected("test-server-1") + assert not manager_with_config.is_connected("test-server-2") # Disabled + assert not manager_with_config.is_connected("nonexistent") # Doesn't exist + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_get_client(self, mock_transport, mock_client, manager_with_config): + """Test getting a client by server name.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client.return_value = mock_client_instance + + # Initialize from config + await manager_with_config.initialize_from_config() + + # Get client + client = manager_with_config.get_client("test-server-1") + assert client == mock_client_instance + + # Try to get non-existent client + with pytest.raises(KeyError): + manager_with_config.get_client("nonexistent") + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_reconnect_server( + self, mock_transport, mock_client, manager_with_config + ): + """Test reconnecting to a server.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client.return_value = mock_client_instance + + # Initialize from config + await manager_with_config.initialize_from_config() + assert mock_client_instance.connect.call_count == 1 + + # Reconnect + success = await manager_with_config.reconnect_server("test-server-1") + assert success is True + + # Should have disconnected and connected again + assert mock_client_instance.disconnect.call_count == 1 + assert mock_client_instance.connect.call_count == 2 + + # Try to reconnect non-existent server + success = await manager_with_config.reconnect_server("nonexistent") + assert success is False + + # Try to reconnect disabled server + success = await manager_with_config.reconnect_server("test-server-2") + assert success is False + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_context_manager(self, mock_transport, mock_client, temp_config): + """Test using manager as async context manager.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client.return_value = mock_client_instance + + async with MCPClientManager(temp_config) as manager: + # Should auto-initialize + assert len(manager.active_clients) == 1 + assert mock_client_instance.connect.call_count == 1 + + # Should auto-disconnect + assert mock_client_instance.disconnect.call_count == 1 + + +@pytest.mark.integration +class TestMCPClientManagerIntegration: + """Integration tests for MCPClientManager with real MCP servers.""" + + @pytest.mark.asyncio + async def test_initialize_and_connect_real_server(self, manager_with_github_server): + """Test initializing and connecting to real GitHub MCP server.""" + # Initialize + results = await manager_with_github_server.initialize_from_config() + + # Should connect to github server (enabled) but not disabled-server + assert "github" in results + assert results["github"] is True + assert "disabled-server" not in results + + # Verify it's actually connected + assert manager_with_github_server.is_connected("github") + assert "github" in manager_with_github_server.list_active_servers() + + # Get the client and test it + client = manager_with_github_server.get_client("github") + assert client is not None + + # Test that we can call a real method + tools = await client.list_tools() + assert isinstance(tools, list) + assert len(tools) > 0 + print(f"āœ“ Connected to GitHub MCP server, found {len(tools)} tools") + + @pytest.mark.asyncio + async def test_reconnect_real_server(self, manager_with_github_server): + """Test reconnecting to a real server.""" + # Initialize + await manager_with_github_server.initialize_from_config() + assert manager_with_github_server.is_connected("github") + + # Get initial tools + client1 = manager_with_github_server.get_client("github") + tools1 = await client1.list_tools() + + # Reconnect + success = await manager_with_github_server.reconnect_server("github") + assert success is True + assert manager_with_github_server.is_connected("github") + + # Get tools after reconnect + client2 = manager_with_github_server.get_client("github") + tools2 = await client2.list_tools() + + # Should have same tools + assert len(tools2) == len(tools1) + print(f"āœ“ Reconnected successfully, tools still available: {len(tools2)}") + + @pytest.mark.asyncio + async def test_use_real_tool(self, manager_with_github_server): + """Test using a real tool from the GitHub MCP server.""" + # Initialize + await manager_with_github_server.initialize_from_config() + + # Get client + client = manager_with_github_server.get_client("github") + + # List tools to find one we can test + tools = await client.list_tools() + + # Find a simple tool like get_user + user_tool = next( + (t for t in tools if "user" in t.get("name", "").lower()), None + ) + + if user_tool: + # Try to call the tool + try: + result = await client.call_tool( + user_tool["name"], {"username": "octocat"} + ) + print(f"āœ“ Successfully called tool {user_tool['name']}") + if result: + data = result[0].text if hasattr(result[0], "text") else str(result) + print(f" Got result: {data[:100]}...") + except Exception as e: + print(f" Tool call failed (expected for some tools): {e}") + else: + print(" No user tool found to test") diff --git a/open-strata/tests/test_mcp_client_manager_sync.py b/open-strata/tests/test_mcp_client_manager_sync.py new file mode 100644 index 00000000..83997cd1 --- /dev/null +++ b/open-strata/tests/test_mcp_client_manager_sync.py @@ -0,0 +1,355 @@ +"""Tests for MCPClientManager sync_with_config functionality.""" + +import asyncio +import json +import tempfile +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +import pytest_asyncio + +from strata.config import MCPServerConfig +from strata.mcp_client_manager import MCPClientManager + + +@pytest_asyncio.fixture +async def manager_with_mocks(): + """Create a manager with mocked clients for testing.""" + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + config = {"mcp": {"servers": {}}} + json.dump(config, f) + config_path = Path(f.name) + + manager = MCPClientManager(config_path) + + yield manager + + await manager.disconnect_all() + config_path.unlink() + + +class TestSyncWithConfig: + """Test sync_with_config method.""" + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_sync_add_new_server( + self, mock_transport, mock_client, manager_with_mocks + ): + """Test syncing when a new server is added.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client_instance.connect = AsyncMock() + mock_client_instance.disconnect = AsyncMock() + mock_client.return_value = mock_client_instance + + # Create new server config + new_servers = { + "test-server": MCPServerConfig( + name="test-server", command="echo", args=["hello"], enabled=True + ) + } + + # Sync with new config + await manager_with_mocks.sync_with_config(new_servers) + + # Verify server was connected + assert "test-server" in manager_with_mocks.active_clients + mock_client_instance.connect.assert_called_once() + mock_client_instance.disconnect.assert_not_called() + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_sync_remove_server( + self, mock_transport, mock_client, manager_with_mocks + ): + """Test syncing when a server is removed.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client_instance.connect = AsyncMock() + mock_client_instance.disconnect = AsyncMock() + mock_client.return_value = mock_client_instance + + # First add a server + manager_with_mocks.active_clients["test-server"] = mock_client_instance + + # Sync with empty config (removes the server) + await manager_with_mocks.sync_with_config({}) + + # Verify server was disconnected + assert "test-server" not in manager_with_mocks.active_clients + mock_client_instance.disconnect.assert_called_once() + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_sync_disable_server( + self, mock_transport, mock_client, manager_with_mocks + ): + """Test syncing when a server is disabled.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client_instance.connect = AsyncMock() + mock_client_instance.disconnect = AsyncMock() + mock_client.return_value = mock_client_instance + + # First add an active server + manager_with_mocks.active_clients["test-server"] = mock_client_instance + + # Sync with disabled server + new_servers = { + "test-server": MCPServerConfig( + name="test-server", + command="echo", + args=["hello"], + enabled=False, # Disabled + ) + } + + await manager_with_mocks.sync_with_config(new_servers) + + # Verify server was disconnected + assert "test-server" not in manager_with_mocks.active_clients + mock_client_instance.disconnect.assert_called_once() + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_sync_enable_server( + self, mock_transport, mock_client, manager_with_mocks + ): + """Test syncing when a server is enabled.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client_instance.connect = AsyncMock() + mock_client_instance.disconnect = AsyncMock() + mock_client.return_value = mock_client_instance + + # Start with no active servers + assert len(manager_with_mocks.active_clients) == 0 + + # Sync with enabled server + new_servers = { + "test-server": MCPServerConfig( + name="test-server", command="echo", args=["hello"], enabled=True + ) + } + + await manager_with_mocks.sync_with_config(new_servers) + + # Verify server was connected + assert "test-server" in manager_with_mocks.active_clients + mock_client_instance.connect.assert_called_once() + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_sync_config_change_reconnects( + self, mock_transport, mock_client, manager_with_mocks + ): + """Test syncing when server config changes (should reconnect).""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client_instance.connect = AsyncMock() + mock_client_instance.disconnect = AsyncMock() + mock_client.return_value = mock_client_instance + + # First add a server + manager_with_mocks.active_clients["test-server"] = mock_client_instance + + # Sync with modified config + new_servers = { + "test-server": MCPServerConfig( + name="test-server", + command="node", # Changed from echo + args=["server.js"], # Changed args + enabled=True, + ) + } + + await manager_with_mocks.sync_with_config(new_servers) + + # Verify server was reconnected (disconnect + connect) + assert "test-server" in manager_with_mocks.active_clients + mock_client_instance.disconnect.assert_called_once() + mock_client_instance.connect.assert_called_once() + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_sync_multiple_changes( + self, mock_transport, mock_client, manager_with_mocks + ): + """Test syncing with multiple changes at once.""" + # Setup mocks + mock_client_instance1 = AsyncMock() + mock_client_instance1.connect = AsyncMock() + mock_client_instance1.disconnect = AsyncMock() + + mock_client_instance2 = AsyncMock() + mock_client_instance2.connect = AsyncMock() + mock_client_instance2.disconnect = AsyncMock() + + # Return different instances for different calls + mock_client.side_effect = [mock_client_instance1, mock_client_instance2] + + # Start with one active server + manager_with_mocks.active_clients["old-server"] = mock_client_instance1 + + # Sync with new config: remove old, add two new + new_servers = { + "new-server-1": MCPServerConfig( + name="new-server-1", command="python", args=["server1.py"], enabled=True + ), + "new-server-2": MCPServerConfig( + name="new-server-2", command="node", args=["server2.js"], enabled=True + ), + } + + await manager_with_mocks.sync_with_config(new_servers) + + # Verify old server was removed + assert "old-server" not in manager_with_mocks.active_clients + mock_client_instance1.disconnect.assert_called_once() + + # Verify new servers were added + assert "new-server-1" in manager_with_mocks.active_clients + assert "new-server-2" in manager_with_mocks.active_clients + mock_client_instance1.connect.assert_called_once() + mock_client_instance2.connect.assert_called_once() + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_sync_with_http_server( + self, mock_transport, mock_client, manager_with_mocks + ): + """Test syncing with HTTP/SSE servers.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client_instance.connect = AsyncMock() + mock_client.return_value = mock_client_instance + + with patch("strata.mcp_client_manager.HTTPTransport") as mock_http_transport: + mock_http_transport_instance = MagicMock() + mock_http_transport.return_value = mock_http_transport_instance + + # Sync with HTTP server + new_servers = { + "api-server": MCPServerConfig( + name="api-server", + type="http", + url="https://api.example.com/mcp", + headers={"Authorization": "Bearer token"}, + enabled=True, + ) + } + + await manager_with_mocks.sync_with_config(new_servers) + + # Verify HTTP transport was created with correct params + mock_http_transport.assert_called_once_with( + url="https://api.example.com/mcp", + mode="http", + headers={"Authorization": "Bearer token"}, + ) + + # Verify server was connected + assert "api-server" in manager_with_mocks.active_clients + mock_client_instance.connect.assert_called_once() + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_sync_error_handling( + self, mock_transport, mock_client, manager_with_mocks + ): + """Test that sync continues even if one server fails.""" + # Setup mocks - first server fails, second succeeds + mock_client_instance1 = AsyncMock() + mock_client_instance1.connect = AsyncMock( + side_effect=Exception("Connection failed") + ) + + mock_client_instance2 = AsyncMock() + mock_client_instance2.connect = AsyncMock() + + mock_client.side_effect = [mock_client_instance1, mock_client_instance2] + + # Sync with two servers + new_servers = { + "failing-server": MCPServerConfig( + name="failing-server", command="bad-command", args=[], enabled=True + ), + "working-server": MCPServerConfig( + name="working-server", command="echo", args=["hello"], enabled=True + ), + } + + await manager_with_mocks.sync_with_config(new_servers) + + # First server should fail and not be in active clients + assert "failing-server" not in manager_with_mocks.active_clients + + # Second server should succeed + assert "working-server" in manager_with_mocks.active_clients + mock_client_instance2.connect.assert_called_once() + + @pytest.mark.asyncio + @patch("strata.mcp_client_manager.MCPClient") + @patch("strata.mcp_client_manager.StdioTransport") + async def test_sync_mutex_prevents_concurrent_operations( + self, mock_transport, mock_client, manager_with_mocks + ): + """Test that mutex prevents concurrent sync operations.""" + # Setup mocks + mock_client_instance = AsyncMock() + mock_client_instance.connect = AsyncMock() + mock_client_instance.disconnect = AsyncMock() + mock_client.return_value = mock_client_instance + + # Track the order of operations + operations = [] + + # Create a slow connect operation + async def slow_connect(): + operations.append("connect_start") + await asyncio.sleep(0.1) # Simulate slow connection + operations.append("connect_end") + + mock_client_instance.connect = slow_connect + + # Create two different server configs + servers1 = { + "server1": MCPServerConfig( + name="server1", command="echo", args=["1"], enabled=True + ) + } + + servers2 = { + "server2": MCPServerConfig( + name="server2", command="echo", args=["2"], enabled=True + ) + } + + # Start two sync operations concurrently + task1 = asyncio.create_task(manager_with_mocks.sync_with_config(servers1)) + task2 = asyncio.create_task(manager_with_mocks.sync_with_config(servers2)) + + # Wait for both to complete + await asyncio.gather(task1, task2) + + # Verify operations were serialized (not interleaved) + # Should see connect_start, connect_end, connect_start, connect_end + assert len(operations) == 4 + assert operations[0] == "connect_start" + assert operations[1] == "connect_end" + assert operations[2] == "connect_start" + assert operations[3] == "connect_end" + + # Only one server should remain active (the last one) + assert len(manager_with_mocks.active_clients) == 1 + assert "server2" in manager_with_mocks.active_clients diff --git a/open-strata/tests/test_server_integration.py b/open-strata/tests/test_server_integration.py new file mode 100644 index 00000000..41e16d26 --- /dev/null +++ b/open-strata/tests/test_server_integration.py @@ -0,0 +1,786 @@ +"""Integration tests for server.py tool_calls without mocking.""" + +import asyncio +import json +import logging +from typing import Any, Dict + +import pytest +import pytest_asyncio +from mcp.server.lowlevel import Server +from mcp.server.stdio import stdio_server + +from strata.config import MCPServerConfig, MCPServerList +from strata.mcp_client_manager import MCPClientManager +from strata.server import client_manager as global_client_manager +from strata.tools import ( + TOOL_DISCOVER_SERVER_ACTIONS, + TOOL_EXECUTE_ACTION, + TOOL_GET_ACTION_DETAILS, + TOOL_HANDLE_AUTH_FAILURE, + TOOL_SEARCH_DOCUMENTATION, +) + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +def create_test_mcp_server(): + """Create a simple test MCP server that runs in a subprocess.""" + + async def run_test_server(): + """Run a test MCP server with some basic tools.""" + server = Server("test-mcp-server") + + @server.list_tools() + async def list_tools(): + """List available tools.""" + return [ + { + "name": "get_time", + "description": "Get the current time", + "inputSchema": { + "type": "object", + "properties": { + "timezone": { + "type": "string", + "description": "Timezone (e.g., UTC, EST)", + } + }, + }, + }, + { + "name": "echo", + "description": "Echo back the input message", + "inputSchema": { + "type": "object", + "properties": { + "message": { + "type": "string", + "description": "Message to echo", + } + }, + "required": ["message"], + }, + }, + { + "name": "calculate", + "description": "Perform a simple calculation", + "inputSchema": { + "type": "object", + "properties": { + "operation": { + "type": "string", + "enum": ["add", "subtract", "multiply", "divide"], + "description": "Operation to perform", + }, + "a": {"type": "number", "description": "First number"}, + "b": {"type": "number", "description": "Second number"}, + }, + "required": ["operation", "a", "b"], + }, + }, + ] + + @server.call_tool() + async def call_tool(name: str, arguments: Dict[str, Any]): + """Handle tool calls.""" + if name == "get_time": + import datetime + + timezone = arguments.get("timezone", "UTC") + return [ + { + "type": "text", + "text": json.dumps( + { + "time": datetime.datetime.now().isoformat(), + "timezone": timezone, + } + ), + } + ] + elif name == "echo": + message = arguments.get("message", "") + return [{"type": "text", "text": json.dumps({"echoed": message})}] + elif name == "calculate": + operation = arguments.get("operation") + a = arguments.get("a", 0) + b = arguments.get("b", 0) + + result = 0 + if operation == "add": + result = a + b + elif operation == "subtract": + result = a - b + elif operation == "multiply": + result = a * b + elif operation == "divide": + result = a / b if b != 0 else "Error: Division by zero" + + return [ + { + "type": "text", + "text": json.dumps( + {"result": result, "operation": operation, "a": a, "b": b} + ), + } + ] + else: + return [ + { + "type": "text", + "text": json.dumps({"error": f"Unknown tool: {name}"}), + } + ] + + # Run the server + await stdio_server(server) + + # Run in asyncio + asyncio.run(run_test_server()) + + +async def call_server_tool(tool_name: str, arguments: dict): + """Helper function to call server tools directly.""" + # Use the global client_manager from server.py + client_manager = global_client_manager + + # Import the actual implementation from utils + from strata.utils.shared_search import UniversalToolSearcher + + result = None + + if tool_name == TOOL_DISCOVER_SERVER_ACTIONS: + user_query = arguments.get("user_query") + server_names = arguments.get("server_names") + + # If no server names provided, use all available servers + if not server_names: + server_names = list(client_manager.active_clients.keys()) + + # Discover actions from specified servers + discovery_result = {} + for server_name in server_names: + try: + client = client_manager.get_client(server_name) + tools = await client.list_tools() + + # Filter tools based on user query if provided + if user_query and tools: + # Build search index for this server's tools + tools_map = {server_name: tools} + searcher = UniversalToolSearcher(tools_map) + search_results = searcher.search(user_query, max_results=50) + + # Convert search results back to tool list + filtered_tools = [] + for result_item in search_results: + for tool in tools: + if tool["name"] == result_item["name"]: + filtered_tools.append( + { + "name": tool["name"], + "description": tool["description"], + "inputSchema": tool["inputSchema"], + } + ) + break + discovery_result[server_name] = filtered_tools + else: + # Return all tools if no query + discovery_result[server_name] = [ + { + "name": tool["name"], + "description": tool["description"], + "inputSchema": tool["inputSchema"], + } + for tool in (tools or []) + ] + except KeyError: + discovery_result[server_name] = { + "error": f"Server '{server_name}' not found or not connected" + } + except Exception as e: + logger.error(f"Error discovering actions from {server_name}: {str(e)}") + discovery_result[server_name] = {"error": str(e)} + + result = {"servers": discovery_result} + + elif tool_name == TOOL_GET_ACTION_DETAILS: + server_name = arguments.get("server_name") + action_name = arguments.get("action_name") + + if not server_name or not action_name: + return [ + { + "type": "text", + "text": "Error: Both server_name and action_name are required", + } + ] + + try: + client = client_manager.get_client(server_name) + tools = await client.list_tools() + + action_found = None + for tool in tools or []: + if tool["name"] == action_name: + action_found = tool + break + + if action_found: + result = { + "server": server_name, + "action": { + "name": action_found["name"], + "description": action_found["description"], + "inputSchema": action_found["inputSchema"], + }, + } + else: + result = { + "error": f"Action '{action_name}' not found in server '{server_name}'" + } + except Exception as e: + result = {"error": f"Error: {str(e)}"} + + elif tool_name == TOOL_EXECUTE_ACTION: + server_name = arguments.get("server_name") + action_name = arguments.get("action_name") + + if not all([server_name, action_name]): + return [ + { + "type": "text", + "text": "Error: server_name and action_name are required", + } + ] + + try: + client = client_manager.get_client(server_name) + action_params = {} + + # Parse parameters + for param_name in ["path_params", "query_params", "body_schema"]: + param_value = arguments.get(param_name) + if param_value: + if isinstance(param_value, str): + try: + action_params.update(json.loads(param_value)) + except json.JSONDecodeError: + return [ + { + "type": "text", + "text": f"Error: Invalid JSON in {param_name}", + } + ] + else: + action_params.update(param_value) + + # Call the tool + tool_result = await client.call_tool(action_name, action_params) + + # Process result + if tool_result: + if isinstance(tool_result, list): + extracted = [] + for item in tool_result: + if hasattr(item, "text"): + extracted.append(item.text) + else: + extracted.append(str(item)) + result_text = "\n".join(extracted) + try: + result = json.loads(result_text) + except: + result = {"output": result_text} + else: + result = {"output": str(tool_result)} + else: + result = {"output": "Action executed successfully"} + except Exception as e: + result = {"error": str(e)} + + elif tool_name == TOOL_SEARCH_DOCUMENTATION: + query = arguments.get("query") + server_name = arguments.get("server_name") + max_results = arguments.get("max_results", 10) + + if not query or not server_name: + return [ + { + "type": "text", + "text": "Error: Both query and server_name are required", + } + ] + + try: + client = client_manager.get_client(server_name) + tools = await client.list_tools() + + # Build search index + tools_map = {server_name: tools if tools else []} + searcher = UniversalToolSearcher(tools_map) + result = searcher.search(query, max_results=max_results) + except Exception as e: + result = [{"error": str(e)}] + + elif tool_name == TOOL_HANDLE_AUTH_FAILURE: + server_name = arguments.get("server_name") + intention = arguments.get("intention") + auth_data = arguments.get("auth_data") + + if not server_name or not intention: + return [ + { + "type": "text", + "text": "Error: Both server_name and intention are required", + } + ] + + if intention == "get_auth_url": + result = { + "server": server_name, + "message": f"Authentication required for server '{server_name}'", + "instructions": "Please provide authentication credentials", + "required_fields": {"token": "Authentication token or API key"}, + } + elif intention == "save_auth_data": + if not auth_data: + return [ + { + "type": "text", + "text": "Error: auth_data is required when intention is 'save_auth_data'", + } + ] + result = { + "server": server_name, + "status": "success", + "message": f"Authentication data saved for server '{server_name}'", + } + else: + result = {"error": f"Invalid intention: '{intention}'"} + + else: + return [{"type": "text", "text": f"Unknown tool: {tool_name}"}] + + # Return formatted result + return [{"type": "text", "text": json.dumps(result) if result else "{}"}] + + +class TestServerToolCalls: + """Test all server tool_calls with real MCP connections.""" + + @pytest_asyncio.fixture + async def setup_test_environment(self, tmp_path): + """Set up test environment with a real MCP server.""" + # Create a test config directory + config_dir = tmp_path / "config" + config_dir.mkdir() + + # Create test MCP server config + config = MCPServerList() + + # Add GitHub MCP server (a real server we can test with) + github_server = MCPServerConfig( + name="github", + command="npx", + args=["-y", "@modelcontextprotocol/server-github"], + # Will work for listing tools + env={"GITHUB_PERSONAL_ACCESS_TOKEN": "test_token"}, + enabled=True, + ) + config.add_server(github_server) + + # Save config + config.save() + + # Initialize the global client manager with test config + await global_client_manager.initialize_from_config() + + yield global_client_manager + + # Cleanup - just clear the active clients + global_client_manager.active_clients.clear() + + @pytest.mark.asyncio + async def test_discover_server_actions_all_servers(self, setup_test_environment): + """Test TOOL_DISCOVER_SERVER_ACTIONS with all available servers.""" + manager = setup_test_environment + + # Test discovering actions from all servers (server_names not provided) + try: + result = await call_server_tool( + TOOL_DISCOVER_SERVER_ACTIONS, {"user_query": ""} # No filter + ) + + assert len(result) == 1 + assert result[0]["type"] == "text" + + # Parse the JSON response + data = json.loads(result[0]["text"]) + assert "servers" in data + + # Should have discovered actions from connected servers + if data["servers"]: + for server_name, actions in data["servers"].items(): + logger.info( + f"Server {server_name}: {len(actions) if isinstance(actions, list) else actions} actions" + ) + if isinstance(actions, list): + assert len(actions) >= 0 # May have 0 or more actions + elif isinstance(actions, dict) and "error" in actions: + # Server might not be connected + logger.warning( + f"Server {server_name} error: {actions['error']}" + ) + finally: + pass + + @pytest.mark.asyncio + async def test_discover_server_actions_with_query(self, setup_test_environment): + """Test TOOL_DISCOVER_SERVER_ACTIONS with search query.""" + manager = setup_test_environment + + # Test discovering actions with a search query + try: + result = await call_server_tool( + TOOL_DISCOVER_SERVER_ACTIONS, + { + "user_query": "repository", # Search for repository-related actions + "server_names": ["github"], + }, + ) + + assert len(result) == 1 + assert result[0]["type"] == "text" + + data = json.loads(result[0]["text"]) + assert "servers" in data + + if "github" in data["servers"] and isinstance( + data["servers"]["github"], list + ): + # Should find repository-related actions if GitHub server is connected + for action in data["servers"]["github"]: + logger.info(f"Found action: {action.get('name')}") + finally: + pass + + @pytest.mark.asyncio + async def test_get_action_details(self, setup_test_environment): + """Test TOOL_GET_ACTION_DETAILS.""" + manager = setup_test_environment + + # First discover available actions + try: + discover_result = await call_server_tool( + TOOL_DISCOVER_SERVER_ACTIONS, + {"user_query": "", "server_names": ["github"]}, + ) + + discover_data = json.loads(discover_result[0]["text"]) + + # Get details for a specific action if any were found + if "github" in discover_data["servers"] and isinstance( + discover_data["servers"]["github"], dict + ): + github_data = discover_data["servers"]["github"] + if "actions" in github_data and github_data["actions"]: + action_name = github_data["actions"][0] + + # Get action details + result = await call_server_tool( + TOOL_GET_ACTION_DETAILS, + {"server_name": "github", "action_name": action_name}, + ) + + assert len(result) == 1 + assert result[0]["type"] == "text" + + data = json.loads(result[0]["text"]) + + if "error" not in data: + assert "server" in data + assert "action" in data + assert data["server"] == "github" + assert data["action"]["name"] == action_name + assert "inputSchema" in data["action"] + logger.info(f"Got details for action: {action_name}") + + # Test with non-existent action + result = await call_server_tool( + TOOL_GET_ACTION_DETAILS, + {"server_name": "github", "action_name": "non_existent_action_xyz"}, + ) + + assert len(result) == 1 + data = json.loads(result[0]["text"]) + assert "error" in data + assert "not found" in data["error"].lower() + + finally: + pass + + @pytest.mark.asyncio + async def test_execute_action(self, setup_test_environment): + """Test TOOL_EXECUTE_ACTION.""" + manager = setup_test_environment + + # Note: We can't actually execute GitHub actions without proper auth, + # but we can test the error handling + try: + result = await call_server_tool( + TOOL_EXECUTE_ACTION, + { + "server_name": "github", + "action_name": "create_issue", + "body_schema": json.dumps( + { + "owner": "test", + "repo": "test", + "title": "Test Issue", + "body": "This is a test", + } + ), + }, + ) + + assert len(result) == 1 + assert result[0]["type"] == "text" + + data = json.loads(result[0]["text"]) + # Will likely get an auth error or similar + logger.info(f"Execute action result: {data}") + + # Test with invalid JSON parameters + result = await call_server_tool( + TOOL_EXECUTE_ACTION, + { + "server_name": "github", + "action_name": "test_action", + "body_schema": "invalid json {", + }, + ) + + assert len(result) == 1 + assert "Invalid JSON" in result[0]["text"] + + finally: + pass + + @pytest.mark.asyncio + async def test_search_documentation(self, setup_test_environment): + """Test TOOL_SEARCH_DOCUMENTATION.""" + manager = setup_test_environment + + # Search for specific documentation + try: + result = await call_server_tool( + TOOL_SEARCH_DOCUMENTATION, + {"query": "repository", "server_name": "github", "max_results": 5}, + ) + + assert len(result) == 1 + assert result[0]["type"] == "text" + + data = json.loads(result[0]["text"]) + assert isinstance(data, list) + + if data and not any( + "error" in item for item in data if isinstance(item, dict) + ): + # Found some results + for item in data[:3]: # Log first 3 results + logger.info(f"Search result: {item.get('name', item)}") + + # Test with missing parameters + result = await call_server_tool( + TOOL_SEARCH_DOCUMENTATION, + { + "query": "test" + # Missing server_name + }, + ) + + assert len(result) == 1 + assert "required" in result[0]["text"].lower() + + finally: + pass + + @pytest.mark.asyncio + async def test_handle_auth_failure(self, setup_test_environment): + """Test TOOL_HANDLE_AUTH_FAILURE.""" + manager = setup_test_environment + + # Test get_auth_url intention + try: + result = await call_server_tool( + TOOL_HANDLE_AUTH_FAILURE, + {"server_name": "github", "intention": "get_auth_url"}, + ) + + assert len(result) == 1 + assert result[0]["type"] == "text" + + data = json.loads(result[0]["text"]) + assert "server" in data + assert data["server"] == "github" + assert "instructions" in data + assert "required_fields" in data + logger.info(f"Auth instructions: {data['message']}") + + # Test save_auth_data intention + result = await call_server_tool( + TOOL_HANDLE_AUTH_FAILURE, + { + "server_name": "github", + "intention": "save_auth_data", + "auth_data": {"token": "test_token_123"}, + }, + ) + + assert len(result) == 1 + data = json.loads(result[0]["text"]) + assert "status" in data + assert data["status"] == "success" + logger.info(f"Auth save result: {data['message']}") + + # Test invalid intention + result = await call_server_tool( + TOOL_HANDLE_AUTH_FAILURE, + {"server_name": "github", "intention": "invalid_intention"}, + ) + + assert len(result) == 1 + data = json.loads(result[0]["text"]) + assert "error" in data + assert "Invalid intention" in data["error"] + + # Test missing auth_data for save_auth_data + result = await call_server_tool( + TOOL_HANDLE_AUTH_FAILURE, + { + "server_name": "github", + "intention": "save_auth_data", + # Missing auth_data + }, + ) + + assert len(result) == 1 + assert "auth_data is required" in result[0]["text"] + + finally: + pass + + @pytest.mark.asyncio + async def test_error_handling(self, setup_test_environment): + """Test error handling for various edge cases.""" + manager = setup_test_environment + + # Test with non-existent server + try: + result = await call_server_tool( + TOOL_GET_ACTION_DETAILS, + {"server_name": "non_existent_server", "action_name": "test"}, + ) + + assert len(result) == 1 + data = json.loads(result[0]["text"]) + assert "error" in data + # The error message contains the server name + assert "non_existent_server" in data["error"].lower() + + # Test unknown tool + result = await call_server_tool("unknown_tool_name", {}) + + assert len(result) == 1 + assert "Unknown tool" in result[0]["text"] + + finally: + pass + + +@pytest.mark.integration +class TestServerIntegrationWithRealServer: + """Integration tests that require a real MCP server running.""" + + @pytest.mark.asyncio + async def test_full_workflow(self): + """Test complete workflow: discover -> get details -> execute.""" + # This test requires proper setup of MCP servers + # It's marked as integration test and can be skipped in CI + + # Create manager + manager = MCPClientManager() + await manager.initialize_from_config() + + # Update the global client manager to use our manager + global global_client_manager + original_manager = global_client_manager + global_client_manager.active_clients = manager.active_clients + global_client_manager.server_list = manager.server_list + + try: + # 1. Discover available servers and actions + discover_result = await call_server_tool( + TOOL_DISCOVER_SERVER_ACTIONS, {"user_query": ""} + ) + + discover_data = json.loads(discover_result[0]["text"]) + logger.info(f"Discovered servers: {list(discover_data['servers'].keys())}") + + # 2. For each server with actions, get details of first action + for server_name, server_data in discover_data["servers"].items(): + if ( + isinstance(server_data, dict) + and "actions" in server_data + and server_data["actions"] + ): + first_action_name = server_data["actions"][0] + logger.info( + f"\nServer: {server_name}, Testing action: {first_action_name}" + ) + + # Get action details + details_result = await call_server_tool( + TOOL_GET_ACTION_DETAILS, + { + "server_name": server_name, + "action_name": first_action_name, + }, + ) + + details_data = json.loads(details_result[0]["text"]) + if "error" not in details_data: + logger.info(f"Action details retrieved successfully") + logger.info( + f"Input schema: {details_data['action'].get('inputSchema', {})}" + ) + + # 3. Search for related documentation + search_result = await call_server_tool( + TOOL_SEARCH_DOCUMENTATION, + { + "query": first_action_name, + "server_name": server_name, + "max_results": 3, + }, + ) + + search_data = json.loads(search_result[0]["text"]) + logger.info(f"Search found {len(search_data)} results") + + break # Test with first server only + + finally: + # Cleanup manager + await manager.disconnect_all() + + +if __name__ == "__main__": + # Run tests + pytest.main([__file__, "-v", "-s"]) diff --git a/open-strata/tests/test_tool_integration.py b/open-strata/tests/test_tool_integration.py new file mode 100644 index 00000000..b152a1b8 --- /dev/null +++ b/open-strata/tests/test_tool_integration.py @@ -0,0 +1,131 @@ +"""Tests for tool integration utilities.""" + +import json +import tempfile +from pathlib import Path +from unittest.mock import patch + +from strata.utils.tool_integration import ( + add_strata_to_cursor, + ensure_json_config, + save_json_config, + update_json_recursively, +) + + +class TestToolIntegration: + """Test tool integration utilities.""" + + def test_update_json_recursively_simple(self): + """Test updating a simple nested dictionary.""" + data = {} + result = update_json_recursively(data, ["key"], "value") + assert result["key"] == "value" + + def test_update_json_recursively_nested(self): + """Test updating a deeply nested dictionary.""" + data = {} + result = update_json_recursively(data, ["level1", "level2", "key"], "value") + assert result["level1"]["level2"]["key"] == "value" + + def test_update_json_recursively_merge_dict(self): + """Test merging dictionaries.""" + data = {"existing": {"keep": "this"}} + new_value = {"new": "data"} + result = update_json_recursively(data, ["existing"], new_value) + + assert result["existing"]["keep"] == "this" + assert result["existing"]["new"] == "data" + + def test_update_json_recursively_overwrite_existing(self): + """Test overwriting existing keys.""" + data = {"existing": {"old": "value"}} + result = update_json_recursively(data, ["existing", "old"], "new_value") + assert result["existing"]["old"] == "new_value" + + def test_ensure_json_config_new_file(self): + """Test creating new config file.""" + with tempfile.TemporaryDirectory() as temp_dir: + config_path = Path(temp_dir) / "test.json" + config = ensure_json_config(config_path) + + assert isinstance(config, dict) + assert len(config) == 0 + + def test_ensure_json_config_existing_file(self): + """Test reading existing config file.""" + with tempfile.TemporaryDirectory() as temp_dir: + config_path = Path(temp_dir) / "test.json" + + # Create existing config + existing_data = {"existing": "data"} + with open(config_path, "w") as f: + json.dump(existing_data, f) + + config = ensure_json_config(config_path) + assert config["existing"] == "data" + + def test_save_json_config(self): + """Test saving JSON config.""" + with tempfile.TemporaryDirectory() as temp_dir: + config_path = Path(temp_dir) / "test.json" + data = {"test": "data"} + + save_json_config(config_path, data) + + assert config_path.exists() + with open(config_path, "r") as f: + saved_data = json.load(f) + + assert saved_data["test"] == "data" + + def test_add_strata_to_cursor_user_scope(self): + """Test adding Strata to Cursor user scope.""" + with tempfile.TemporaryDirectory() as temp_dir: + with patch("pathlib.Path.home", return_value=Path(temp_dir)): + result = add_strata_to_cursor("user") + + assert result == 0 + + # Check config was created + config_path = Path(temp_dir) / ".cursor" / "mcp.json" + assert config_path.exists() + + with open(config_path, "r") as f: + config = json.load(f) + + assert "mcpServers" in config + assert "strata" in config["mcpServers"] + assert config["mcpServers"]["strata"]["command"] == "strata" + + def test_add_strata_to_cursor_project_scope(self): + """Test adding Strata to Cursor project scope.""" + with tempfile.TemporaryDirectory() as temp_dir: + original_cwd = Path.cwd() + try: + import os + + os.chdir(temp_dir) + + result = add_strata_to_cursor("project") + + assert result == 0 + + # Check config was created + config_path = Path(temp_dir) / ".cursor" / "mcp.json" + assert config_path.exists() + + with open(config_path, "r") as f: + config = json.load(f) + + assert "mcpServers" in config + assert "strata" in config["mcpServers"] + assert config["mcpServers"]["strata"]["command"] == "strata" + + finally: + os.chdir(original_cwd) + + def test_add_strata_to_cursor_invalid_scope(self): + """Test adding Strata to Cursor with invalid scope.""" + result = add_strata_to_cursor("invalid") + assert result == 1 diff --git a/open-strata/uv.lock b/open-strata/uv.lock new file mode 100644 index 00000000..299c281b --- /dev/null +++ b/open-strata/uv.lock @@ -0,0 +1,1112 @@ +version = 1 +revision = 3 +requires-python = ">=3.10" +resolution-markers = [ + "python_full_version >= '3.11'", + "python_full_version < '3.11'", +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "bm25s" +version = "0.2.14" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "scipy", version = "1.16.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/72/5ad06c30991ba494242785a3ab8987deb01c07dfc1c492847bde221e62bf/bm25s-0.2.14.tar.gz", hash = "sha256:7b6717770fffbdb3b962e5fe8ef1e6eac7f285d0fbc14484b321e136df837139", size = 59266, upload-time = "2025-09-08T17:06:30.728Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/3e/e3ae2f0fb0f8f46f9c787fa419ca5203ff850d0630749a26baf0a6570453/bm25s-0.2.14-py3-none-any.whl", hash = "sha256:76cdb70ae40747941b150a1ec16a9c20c576d6534d0a3c3eebb303c779b3cf65", size = 55128, upload-time = "2025-09-08T17:06:29.324Z" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "click" +version = "8.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "hatchling" +version = "1.27.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "pathspec" }, + { name = "pluggy" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "trove-classifiers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8f/8a/cc1debe3514da292094f1c3a700e4ca25442489731ef7c0814358816bb03/hatchling-1.27.0.tar.gz", hash = "sha256:971c296d9819abb3811112fc52c7a9751c8d381898f36533bb16f9791e941fd6", size = 54983, upload-time = "2024-12-15T17:08:11.894Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/e7/ae38d7a6dfba0533684e0b2136817d667588ae3ec984c1a4e5df5eb88482/hatchling-1.27.0-py3-none-any.whl", hash = "sha256:d3a2f3567c4f926ea39849cdf924c7e99e6686c9c8e288ae1037c8fa2a5d937b", size = 75794, upload-time = "2024-12-15T17:08:10.364Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + +[[package]] +name = "mcp" +version = "1.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/fd/d6e941a52446198b73e5e4a953441f667f1469aeb06fb382d9f6729d6168/mcp-1.14.0.tar.gz", hash = "sha256:2e7d98b195e08b2abc1dc6191f6f3dc0059604ac13ee6a40f88676274787fac4", size = 454855, upload-time = "2025-09-11T17:40:48.667Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/7b/84b0dd4c2c5a499d2c5d63fb7a1224c25fc4c8b6c24623fa7a566471480d/mcp-1.14.0-py3-none-any.whl", hash = "sha256:b2d27feba27b4c53d41b58aa7f4d090ae0cb740cbc4e339af10f8cbe54c4e19d", size = 163805, upload-time = "2025-09-11T17:40:46.891Z" }, +] + +[[package]] +name = "numpy" +version = "2.2.6" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +sdist = { url = "https://files.pythonhosted.org/packages/76/21/7d2a95e4bba9dc13d043ee156a356c0a8f0c6309dff6b21b4d71a073b8a8/numpy-2.2.6.tar.gz", hash = "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd", size = 20276440, upload-time = "2025-05-17T22:38:04.611Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb", size = 21165245, upload-time = "2025-05-17T21:27:58.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90", size = 14360048, upload-time = "2025-05-17T21:28:21.406Z" }, + { url = "https://files.pythonhosted.org/packages/fd/77/dc2fcfc66943c6410e2bf598062f5959372735ffda175b39906d54f02349/numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163", size = 5340542, upload-time = "2025-05-17T21:28:30.931Z" }, + { url = "https://files.pythonhosted.org/packages/7a/4f/1cb5fdc353a5f5cc7feb692db9b8ec2c3d6405453f982435efc52561df58/numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf", size = 6878301, upload-time = "2025-05-17T21:28:41.613Z" }, + { url = "https://files.pythonhosted.org/packages/eb/17/96a3acd228cec142fcb8723bd3cc39c2a474f7dcf0a5d16731980bcafa95/numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83", size = 14297320, upload-time = "2025-05-17T21:29:02.78Z" }, + { url = "https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915", size = 16801050, upload-time = "2025-05-17T21:29:27.675Z" }, + { url = "https://files.pythonhosted.org/packages/07/b6/89d837eddef52b3d0cec5c6ba0456c1bf1b9ef6a6672fc2b7873c3ec4e2e/numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680", size = 15807034, upload-time = "2025-05-17T21:29:51.102Z" }, + { url = "https://files.pythonhosted.org/packages/01/c8/dc6ae86e3c61cfec1f178e5c9f7858584049b6093f843bca541f94120920/numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289", size = 18614185, upload-time = "2025-05-17T21:30:18.703Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c5/0064b1b7e7c89137b471ccec1fd2282fceaae0ab3a9550f2568782d80357/numpy-2.2.6-cp310-cp310-win32.whl", hash = "sha256:b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d", size = 6527149, upload-time = "2025-05-17T21:30:29.788Z" }, + { url = "https://files.pythonhosted.org/packages/a3/dd/4b822569d6b96c39d1215dbae0582fd99954dcbcf0c1a13c61783feaca3f/numpy-2.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3", size = 12904620, upload-time = "2025-05-17T21:30:48.994Z" }, + { url = "https://files.pythonhosted.org/packages/da/a8/4f83e2aa666a9fbf56d6118faaaf5f1974d456b1823fda0a176eff722839/numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae", size = 21176963, upload-time = "2025-05-17T21:31:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/b3/2b/64e1affc7972decb74c9e29e5649fac940514910960ba25cd9af4488b66c/numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a", size = 14406743, upload-time = "2025-05-17T21:31:41.087Z" }, + { url = "https://files.pythonhosted.org/packages/4a/9f/0121e375000b5e50ffdd8b25bf78d8e1a5aa4cca3f185d41265198c7b834/numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42", size = 5352616, upload-time = "2025-05-17T21:31:50.072Z" }, + { url = "https://files.pythonhosted.org/packages/31/0d/b48c405c91693635fbe2dcd7bc84a33a602add5f63286e024d3b6741411c/numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491", size = 6889579, upload-time = "2025-05-17T21:32:01.712Z" }, + { url = "https://files.pythonhosted.org/packages/52/b8/7f0554d49b565d0171eab6e99001846882000883998e7b7d9f0d98b1f934/numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a", size = 14312005, upload-time = "2025-05-17T21:32:23.332Z" }, + { url = "https://files.pythonhosted.org/packages/b3/dd/2238b898e51bd6d389b7389ffb20d7f4c10066d80351187ec8e303a5a475/numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf", size = 16821570, upload-time = "2025-05-17T21:32:47.991Z" }, + { url = "https://files.pythonhosted.org/packages/83/6c/44d0325722cf644f191042bf47eedad61c1e6df2432ed65cbe28509d404e/numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1", size = 15818548, upload-time = "2025-05-17T21:33:11.728Z" }, + { url = "https://files.pythonhosted.org/packages/ae/9d/81e8216030ce66be25279098789b665d49ff19eef08bfa8cb96d4957f422/numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab", size = 18620521, upload-time = "2025-05-17T21:33:39.139Z" }, + { url = "https://files.pythonhosted.org/packages/6a/fd/e19617b9530b031db51b0926eed5345ce8ddc669bb3bc0044b23e275ebe8/numpy-2.2.6-cp311-cp311-win32.whl", hash = "sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47", size = 6525866, upload-time = "2025-05-17T21:33:50.273Z" }, + { url = "https://files.pythonhosted.org/packages/31/0a/f354fb7176b81747d870f7991dc763e157a934c717b67b58456bc63da3df/numpy-2.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303", size = 12907455, upload-time = "2025-05-17T21:34:09.135Z" }, + { url = "https://files.pythonhosted.org/packages/82/5d/c00588b6cf18e1da539b45d3598d3557084990dcc4331960c15ee776ee41/numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff", size = 20875348, upload-time = "2025-05-17T21:34:39.648Z" }, + { url = "https://files.pythonhosted.org/packages/66/ee/560deadcdde6c2f90200450d5938f63a34b37e27ebff162810f716f6a230/numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c", size = 14119362, upload-time = "2025-05-17T21:35:01.241Z" }, + { url = "https://files.pythonhosted.org/packages/3c/65/4baa99f1c53b30adf0acd9a5519078871ddde8d2339dc5a7fde80d9d87da/numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3", size = 5084103, upload-time = "2025-05-17T21:35:10.622Z" }, + { url = "https://files.pythonhosted.org/packages/cc/89/e5a34c071a0570cc40c9a54eb472d113eea6d002e9ae12bb3a8407fb912e/numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282", size = 6625382, upload-time = "2025-05-17T21:35:21.414Z" }, + { url = "https://files.pythonhosted.org/packages/f8/35/8c80729f1ff76b3921d5c9487c7ac3de9b2a103b1cd05e905b3090513510/numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87", size = 14018462, upload-time = "2025-05-17T21:35:42.174Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3d/1e1db36cfd41f895d266b103df00ca5b3cbe965184df824dec5c08c6b803/numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249", size = 16527618, upload-time = "2025-05-17T21:36:06.711Z" }, + { url = "https://files.pythonhosted.org/packages/61/c6/03ed30992602c85aa3cd95b9070a514f8b3c33e31124694438d88809ae36/numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49", size = 15505511, upload-time = "2025-05-17T21:36:29.965Z" }, + { url = "https://files.pythonhosted.org/packages/b7/25/5761d832a81df431e260719ec45de696414266613c9ee268394dd5ad8236/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de", size = 18313783, upload-time = "2025-05-17T21:36:56.883Z" }, + { url = "https://files.pythonhosted.org/packages/57/0a/72d5a3527c5ebffcd47bde9162c39fae1f90138c961e5296491ce778e682/numpy-2.2.6-cp312-cp312-win32.whl", hash = "sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4", size = 6246506, upload-time = "2025-05-17T21:37:07.368Z" }, + { url = "https://files.pythonhosted.org/packages/36/fa/8c9210162ca1b88529ab76b41ba02d433fd54fecaf6feb70ef9f124683f1/numpy-2.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2", size = 12614190, upload-time = "2025-05-17T21:37:26.213Z" }, + { url = "https://files.pythonhosted.org/packages/f9/5c/6657823f4f594f72b5471f1db1ab12e26e890bb2e41897522d134d2a3e81/numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84", size = 20867828, upload-time = "2025-05-17T21:37:56.699Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9e/14520dc3dadf3c803473bd07e9b2bd1b69bc583cb2497b47000fed2fa92f/numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b", size = 14143006, upload-time = "2025-05-17T21:38:18.291Z" }, + { url = "https://files.pythonhosted.org/packages/4f/06/7e96c57d90bebdce9918412087fc22ca9851cceaf5567a45c1f404480e9e/numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d", size = 5076765, upload-time = "2025-05-17T21:38:27.319Z" }, + { url = "https://files.pythonhosted.org/packages/73/ed/63d920c23b4289fdac96ddbdd6132e9427790977d5457cd132f18e76eae0/numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566", size = 6617736, upload-time = "2025-05-17T21:38:38.141Z" }, + { url = "https://files.pythonhosted.org/packages/85/c5/e19c8f99d83fd377ec8c7e0cf627a8049746da54afc24ef0a0cb73d5dfb5/numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f", size = 14010719, upload-time = "2025-05-17T21:38:58.433Z" }, + { url = "https://files.pythonhosted.org/packages/19/49/4df9123aafa7b539317bf6d342cb6d227e49f7a35b99c287a6109b13dd93/numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f", size = 16526072, upload-time = "2025-05-17T21:39:22.638Z" }, + { url = "https://files.pythonhosted.org/packages/b2/6c/04b5f47f4f32f7c2b0e7260442a8cbcf8168b0e1a41ff1495da42f42a14f/numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868", size = 15503213, upload-time = "2025-05-17T21:39:45.865Z" }, + { url = "https://files.pythonhosted.org/packages/17/0a/5cd92e352c1307640d5b6fec1b2ffb06cd0dabe7d7b8227f97933d378422/numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d", size = 18316632, upload-time = "2025-05-17T21:40:13.331Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3b/5cba2b1d88760ef86596ad0f3d484b1cbff7c115ae2429678465057c5155/numpy-2.2.6-cp313-cp313-win32.whl", hash = "sha256:5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd", size = 6244532, upload-time = "2025-05-17T21:43:46.099Z" }, + { url = "https://files.pythonhosted.org/packages/cb/3b/d58c12eafcb298d4e6d0d40216866ab15f59e55d148a5658bb3132311fcf/numpy-2.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c", size = 12610885, upload-time = "2025-05-17T21:44:05.145Z" }, + { url = "https://files.pythonhosted.org/packages/6b/9e/4bf918b818e516322db999ac25d00c75788ddfd2d2ade4fa66f1f38097e1/numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6", size = 20963467, upload-time = "2025-05-17T21:40:44Z" }, + { url = "https://files.pythonhosted.org/packages/61/66/d2de6b291507517ff2e438e13ff7b1e2cdbdb7cb40b3ed475377aece69f9/numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda", size = 14225144, upload-time = "2025-05-17T21:41:05.695Z" }, + { url = "https://files.pythonhosted.org/packages/e4/25/480387655407ead912e28ba3a820bc69af9adf13bcbe40b299d454ec011f/numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40", size = 5200217, upload-time = "2025-05-17T21:41:15.903Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4a/6e313b5108f53dcbf3aca0c0f3e9c92f4c10ce57a0a721851f9785872895/numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8", size = 6712014, upload-time = "2025-05-17T21:41:27.321Z" }, + { url = "https://files.pythonhosted.org/packages/b7/30/172c2d5c4be71fdf476e9de553443cf8e25feddbe185e0bd88b096915bcc/numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f", size = 14077935, upload-time = "2025-05-17T21:41:49.738Z" }, + { url = "https://files.pythonhosted.org/packages/12/fb/9e743f8d4e4d3c710902cf87af3512082ae3d43b945d5d16563f26ec251d/numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa", size = 16600122, upload-time = "2025-05-17T21:42:14.046Z" }, + { url = "https://files.pythonhosted.org/packages/12/75/ee20da0e58d3a66f204f38916757e01e33a9737d0b22373b3eb5a27358f9/numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571", size = 15586143, upload-time = "2025-05-17T21:42:37.464Z" }, + { url = "https://files.pythonhosted.org/packages/76/95/bef5b37f29fc5e739947e9ce5179ad402875633308504a52d188302319c8/numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1", size = 18385260, upload-time = "2025-05-17T21:43:05.189Z" }, + { url = "https://files.pythonhosted.org/packages/09/04/f2f83279d287407cf36a7a8053a5abe7be3622a4363337338f2585e4afda/numpy-2.2.6-cp313-cp313t-win32.whl", hash = "sha256:038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff", size = 6377225, upload-time = "2025-05-17T21:43:16.254Z" }, + { url = "https://files.pythonhosted.org/packages/67/0e/35082d13c09c02c011cf21570543d202ad929d961c02a147493cb0c2bdf5/numpy-2.2.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06", size = 12771374, upload-time = "2025-05-17T21:43:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/9e/3b/d94a75f4dbf1ef5d321523ecac21ef23a3cd2ac8b78ae2aac40873590229/numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d", size = 21040391, upload-time = "2025-05-17T21:44:35.948Z" }, + { url = "https://files.pythonhosted.org/packages/17/f4/09b2fa1b58f0fb4f7c7963a1649c64c4d315752240377ed74d9cd878f7b5/numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db", size = 6786754, upload-time = "2025-05-17T21:44:47.446Z" }, + { url = "https://files.pythonhosted.org/packages/af/30/feba75f143bdc868a1cc3f44ccfa6c4b9ec522b36458e738cd00f67b573f/numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543", size = 16643476, upload-time = "2025-05-17T21:45:11.871Z" }, + { url = "https://files.pythonhosted.org/packages/37/48/ac2a9584402fb6c0cd5b5d1a91dcf176b15760130dd386bbafdbfe3640bf/numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00", size = 12812666, upload-time = "2025-05-17T21:45:31.426Z" }, +] + +[[package]] +name = "numpy" +version = "2.3.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", +] +sdist = { url = "https://files.pythonhosted.org/packages/d0/19/95b3d357407220ed24c139018d2518fab0a61a948e68286a25f1a4d049ff/numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029", size = 20576648, upload-time = "2025-09-09T16:54:12.543Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/45/e80d203ef6b267aa29b22714fb558930b27960a0c5ce3c19c999232bb3eb/numpy-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ffc4f5caba7dfcbe944ed674b7eef683c7e94874046454bb79ed7ee0236f59d", size = 21259253, upload-time = "2025-09-09T15:56:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/52/18/cf2c648fccf339e59302e00e5f2bc87725a3ce1992f30f3f78c9044d7c43/numpy-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7e946c7170858a0295f79a60214424caac2ffdb0063d4d79cb681f9aa0aa569", size = 14450980, upload-time = "2025-09-09T15:56:05.926Z" }, + { url = "https://files.pythonhosted.org/packages/93/fb/9af1082bec870188c42a1c239839915b74a5099c392389ff04215dcee812/numpy-2.3.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:cd4260f64bc794c3390a63bf0728220dd1a68170c169088a1e0dfa2fde1be12f", size = 5379709, upload-time = "2025-09-09T15:56:07.95Z" }, + { url = "https://files.pythonhosted.org/packages/75/0f/bfd7abca52bcbf9a4a65abc83fe18ef01ccdeb37bfb28bbd6ad613447c79/numpy-2.3.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f0ddb4b96a87b6728df9362135e764eac3cfa674499943ebc44ce96c478ab125", size = 6913923, upload-time = "2025-09-09T15:56:09.443Z" }, + { url = "https://files.pythonhosted.org/packages/79/55/d69adad255e87ab7afda1caf93ca997859092afeb697703e2f010f7c2e55/numpy-2.3.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:afd07d377f478344ec6ca2b8d4ca08ae8bd44706763d1efb56397de606393f48", size = 14589591, upload-time = "2025-09-09T15:56:11.234Z" }, + { url = "https://files.pythonhosted.org/packages/10/a2/010b0e27ddeacab7839957d7a8f00e91206e0c2c47abbb5f35a2630e5387/numpy-2.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bc92a5dedcc53857249ca51ef29f5e5f2f8c513e22cfb90faeb20343b8c6f7a6", size = 16938714, upload-time = "2025-09-09T15:56:14.637Z" }, + { url = "https://files.pythonhosted.org/packages/1c/6b/12ce8ede632c7126eb2762b9e15e18e204b81725b81f35176eac14dc5b82/numpy-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7af05ed4dc19f308e1d9fc759f36f21921eb7bbfc82843eeec6b2a2863a0aefa", size = 16370592, upload-time = "2025-09-09T15:56:17.285Z" }, + { url = "https://files.pythonhosted.org/packages/b4/35/aba8568b2593067bb6a8fe4c52babb23b4c3b9c80e1b49dff03a09925e4a/numpy-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:433bf137e338677cebdd5beac0199ac84712ad9d630b74eceeb759eaa45ddf30", size = 18884474, upload-time = "2025-09-09T15:56:20.943Z" }, + { url = "https://files.pythonhosted.org/packages/45/fa/7f43ba10c77575e8be7b0138d107e4f44ca4a1ef322cd16980ea3e8b8222/numpy-2.3.3-cp311-cp311-win32.whl", hash = "sha256:eb63d443d7b4ffd1e873f8155260d7f58e7e4b095961b01c91062935c2491e57", size = 6599794, upload-time = "2025-09-09T15:56:23.258Z" }, + { url = "https://files.pythonhosted.org/packages/0a/a2/a4f78cb2241fe5664a22a10332f2be886dcdea8784c9f6a01c272da9b426/numpy-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:ec9d249840f6a565f58d8f913bccac2444235025bbb13e9a4681783572ee3caa", size = 13088104, upload-time = "2025-09-09T15:56:25.476Z" }, + { url = "https://files.pythonhosted.org/packages/79/64/e424e975adbd38282ebcd4891661965b78783de893b381cbc4832fb9beb2/numpy-2.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:74c2a948d02f88c11a3c075d9733f1ae67d97c6bdb97f2bb542f980458b257e7", size = 10460772, upload-time = "2025-09-09T15:56:27.679Z" }, + { url = "https://files.pythonhosted.org/packages/51/5d/bb7fc075b762c96329147799e1bcc9176ab07ca6375ea976c475482ad5b3/numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf", size = 20957014, upload-time = "2025-09-09T15:56:29.966Z" }, + { url = "https://files.pythonhosted.org/packages/6b/0e/c6211bb92af26517acd52125a237a92afe9c3124c6a68d3b9f81b62a0568/numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25", size = 14185220, upload-time = "2025-09-09T15:56:32.175Z" }, + { url = "https://files.pythonhosted.org/packages/22/f2/07bb754eb2ede9073f4054f7c0286b0d9d2e23982e090a80d478b26d35ca/numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe", size = 5113918, upload-time = "2025-09-09T15:56:34.175Z" }, + { url = "https://files.pythonhosted.org/packages/81/0a/afa51697e9fb74642f231ea36aca80fa17c8fb89f7a82abd5174023c3960/numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b", size = 6647922, upload-time = "2025-09-09T15:56:36.149Z" }, + { url = "https://files.pythonhosted.org/packages/5d/f5/122d9cdb3f51c520d150fef6e87df9279e33d19a9611a87c0d2cf78a89f4/numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8", size = 14281991, upload-time = "2025-09-09T15:56:40.548Z" }, + { url = "https://files.pythonhosted.org/packages/51/64/7de3c91e821a2debf77c92962ea3fe6ac2bc45d0778c1cbe15d4fce2fd94/numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20", size = 16641643, upload-time = "2025-09-09T15:56:43.343Z" }, + { url = "https://files.pythonhosted.org/packages/30/e4/961a5fa681502cd0d68907818b69f67542695b74e3ceaa513918103b7e80/numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea", size = 16056787, upload-time = "2025-09-09T15:56:46.141Z" }, + { url = "https://files.pythonhosted.org/packages/99/26/92c912b966e47fbbdf2ad556cb17e3a3088e2e1292b9833be1dfa5361a1a/numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7", size = 18579598, upload-time = "2025-09-09T15:56:49.844Z" }, + { url = "https://files.pythonhosted.org/packages/17/b6/fc8f82cb3520768718834f310c37d96380d9dc61bfdaf05fe5c0b7653e01/numpy-2.3.3-cp312-cp312-win32.whl", hash = "sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf", size = 6320800, upload-time = "2025-09-09T15:56:52.499Z" }, + { url = "https://files.pythonhosted.org/packages/32/ee/de999f2625b80d043d6d2d628c07d0d5555a677a3cf78fdf868d409b8766/numpy-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb", size = 12786615, upload-time = "2025-09-09T15:56:54.422Z" }, + { url = "https://files.pythonhosted.org/packages/49/6e/b479032f8a43559c383acb20816644f5f91c88f633d9271ee84f3b3a996c/numpy-2.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5", size = 10195936, upload-time = "2025-09-09T15:56:56.541Z" }, + { url = "https://files.pythonhosted.org/packages/7d/b9/984c2b1ee61a8b803bf63582b4ac4242cf76e2dbd663efeafcb620cc0ccb/numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf", size = 20949588, upload-time = "2025-09-09T15:56:59.087Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e4/07970e3bed0b1384d22af1e9912527ecbeb47d3b26e9b6a3bced068b3bea/numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7", size = 14177802, upload-time = "2025-09-09T15:57:01.73Z" }, + { url = "https://files.pythonhosted.org/packages/35/c7/477a83887f9de61f1203bad89cf208b7c19cc9fef0cebef65d5a1a0619f2/numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6", size = 5106537, upload-time = "2025-09-09T15:57:03.765Z" }, + { url = "https://files.pythonhosted.org/packages/52/47/93b953bd5866a6f6986344d045a207d3f1cfbad99db29f534ea9cee5108c/numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7", size = 6640743, upload-time = "2025-09-09T15:57:07.921Z" }, + { url = "https://files.pythonhosted.org/packages/23/83/377f84aaeb800b64c0ef4de58b08769e782edcefa4fea712910b6f0afd3c/numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c", size = 14278881, upload-time = "2025-09-09T15:57:11.349Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a5/bf3db6e66c4b160d6ea10b534c381a1955dfab34cb1017ea93aa33c70ed3/numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93", size = 16636301, upload-time = "2025-09-09T15:57:14.245Z" }, + { url = "https://files.pythonhosted.org/packages/a2/59/1287924242eb4fa3f9b3a2c30400f2e17eb2707020d1c5e3086fe7330717/numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae", size = 16053645, upload-time = "2025-09-09T15:57:16.534Z" }, + { url = "https://files.pythonhosted.org/packages/e6/93/b3d47ed882027c35e94ac2320c37e452a549f582a5e801f2d34b56973c97/numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86", size = 18578179, upload-time = "2025-09-09T15:57:18.883Z" }, + { url = "https://files.pythonhosted.org/packages/20/d9/487a2bccbf7cc9d4bfc5f0f197761a5ef27ba870f1e3bbb9afc4bbe3fcc2/numpy-2.3.3-cp313-cp313-win32.whl", hash = "sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8", size = 6312250, upload-time = "2025-09-09T15:57:21.296Z" }, + { url = "https://files.pythonhosted.org/packages/1b/b5/263ebbbbcede85028f30047eab3d58028d7ebe389d6493fc95ae66c636ab/numpy-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf", size = 12783269, upload-time = "2025-09-09T15:57:23.034Z" }, + { url = "https://files.pythonhosted.org/packages/fa/75/67b8ca554bbeaaeb3fac2e8bce46967a5a06544c9108ec0cf5cece559b6c/numpy-2.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5", size = 10195314, upload-time = "2025-09-09T15:57:25.045Z" }, + { url = "https://files.pythonhosted.org/packages/11/d0/0d1ddec56b162042ddfafeeb293bac672de9b0cfd688383590090963720a/numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc", size = 21048025, upload-time = "2025-09-09T15:57:27.257Z" }, + { url = "https://files.pythonhosted.org/packages/36/9e/1996ca6b6d00415b6acbdd3c42f7f03ea256e2c3f158f80bd7436a8a19f3/numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc", size = 14301053, upload-time = "2025-09-09T15:57:30.077Z" }, + { url = "https://files.pythonhosted.org/packages/05/24/43da09aa764c68694b76e84b3d3f0c44cb7c18cdc1ba80e48b0ac1d2cd39/numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b", size = 5229444, upload-time = "2025-09-09T15:57:32.733Z" }, + { url = "https://files.pythonhosted.org/packages/bc/14/50ffb0f22f7218ef8af28dd089f79f68289a7a05a208db9a2c5dcbe123c1/numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19", size = 6738039, upload-time = "2025-09-09T15:57:34.328Z" }, + { url = "https://files.pythonhosted.org/packages/55/52/af46ac0795e09657d45a7f4db961917314377edecf66db0e39fa7ab5c3d3/numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30", size = 14352314, upload-time = "2025-09-09T15:57:36.255Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b1/dc226b4c90eb9f07a3fff95c2f0db3268e2e54e5cce97c4ac91518aee71b/numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e", size = 16701722, upload-time = "2025-09-09T15:57:38.622Z" }, + { url = "https://files.pythonhosted.org/packages/9d/9d/9d8d358f2eb5eced14dba99f110d83b5cd9a4460895230f3b396ad19a323/numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3", size = 16132755, upload-time = "2025-09-09T15:57:41.16Z" }, + { url = "https://files.pythonhosted.org/packages/b6/27/b3922660c45513f9377b3fb42240bec63f203c71416093476ec9aa0719dc/numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea", size = 18651560, upload-time = "2025-09-09T15:57:43.459Z" }, + { url = "https://files.pythonhosted.org/packages/5b/8e/3ab61a730bdbbc201bb245a71102aa609f0008b9ed15255500a99cd7f780/numpy-2.3.3-cp313-cp313t-win32.whl", hash = "sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd", size = 6442776, upload-time = "2025-09-09T15:57:45.793Z" }, + { url = "https://files.pythonhosted.org/packages/1c/3a/e22b766b11f6030dc2decdeff5c2fb1610768055603f9f3be88b6d192fb2/numpy-2.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d", size = 12927281, upload-time = "2025-09-09T15:57:47.492Z" }, + { url = "https://files.pythonhosted.org/packages/7b/42/c2e2bc48c5e9b2a83423f99733950fbefd86f165b468a3d85d52b30bf782/numpy-2.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1", size = 10265275, upload-time = "2025-09-09T15:57:49.647Z" }, + { url = "https://files.pythonhosted.org/packages/6b/01/342ad585ad82419b99bcf7cebe99e61da6bedb89e213c5fd71acc467faee/numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593", size = 20951527, upload-time = "2025-09-09T15:57:52.006Z" }, + { url = "https://files.pythonhosted.org/packages/ef/d8/204e0d73fc1b7a9ee80ab1fe1983dd33a4d64a4e30a05364b0208e9a241a/numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652", size = 14186159, upload-time = "2025-09-09T15:57:54.407Z" }, + { url = "https://files.pythonhosted.org/packages/22/af/f11c916d08f3a18fb8ba81ab72b5b74a6e42ead4c2846d270eb19845bf74/numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7", size = 5114624, upload-time = "2025-09-09T15:57:56.5Z" }, + { url = "https://files.pythonhosted.org/packages/fb/11/0ed919c8381ac9d2ffacd63fd1f0c34d27e99cab650f0eb6f110e6ae4858/numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a", size = 6642627, upload-time = "2025-09-09T15:57:58.206Z" }, + { url = "https://files.pythonhosted.org/packages/ee/83/deb5f77cb0f7ba6cb52b91ed388b47f8f3c2e9930d4665c600408d9b90b9/numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe", size = 14296926, upload-time = "2025-09-09T15:58:00.035Z" }, + { url = "https://files.pythonhosted.org/packages/77/cc/70e59dcb84f2b005d4f306310ff0a892518cc0c8000a33d0e6faf7ca8d80/numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421", size = 16638958, upload-time = "2025-09-09T15:58:02.738Z" }, + { url = "https://files.pythonhosted.org/packages/b6/5a/b2ab6c18b4257e099587d5b7f903317bd7115333ad8d4ec4874278eafa61/numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021", size = 16071920, upload-time = "2025-09-09T15:58:05.029Z" }, + { url = "https://files.pythonhosted.org/packages/b8/f1/8b3fdc44324a259298520dd82147ff648979bed085feeacc1250ef1656c0/numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf", size = 18577076, upload-time = "2025-09-09T15:58:07.745Z" }, + { url = "https://files.pythonhosted.org/packages/f0/a1/b87a284fb15a42e9274e7fcea0dad259d12ddbf07c1595b26883151ca3b4/numpy-2.3.3-cp314-cp314-win32.whl", hash = "sha256:cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0", size = 6366952, upload-time = "2025-09-09T15:58:10.096Z" }, + { url = "https://files.pythonhosted.org/packages/70/5f/1816f4d08f3b8f66576d8433a66f8fa35a5acfb3bbd0bf6c31183b003f3d/numpy-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8", size = 12919322, upload-time = "2025-09-09T15:58:12.138Z" }, + { url = "https://files.pythonhosted.org/packages/8c/de/072420342e46a8ea41c324a555fa90fcc11637583fb8df722936aed1736d/numpy-2.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe", size = 10478630, upload-time = "2025-09-09T15:58:14.64Z" }, + { url = "https://files.pythonhosted.org/packages/d5/df/ee2f1c0a9de7347f14da5dd3cd3c3b034d1b8607ccb6883d7dd5c035d631/numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00", size = 21047987, upload-time = "2025-09-09T15:58:16.889Z" }, + { url = "https://files.pythonhosted.org/packages/d6/92/9453bdc5a4e9e69cf4358463f25e8260e2ffc126d52e10038b9077815989/numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a", size = 14301076, upload-time = "2025-09-09T15:58:20.343Z" }, + { url = "https://files.pythonhosted.org/packages/13/77/1447b9eb500f028bb44253105bd67534af60499588a5149a94f18f2ca917/numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d", size = 5229491, upload-time = "2025-09-09T15:58:22.481Z" }, + { url = "https://files.pythonhosted.org/packages/3d/f9/d72221b6ca205f9736cb4b2ce3b002f6e45cd67cd6a6d1c8af11a2f0b649/numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a", size = 6737913, upload-time = "2025-09-09T15:58:24.569Z" }, + { url = "https://files.pythonhosted.org/packages/3c/5f/d12834711962ad9c46af72f79bb31e73e416ee49d17f4c797f72c96b6ca5/numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54", size = 14352811, upload-time = "2025-09-09T15:58:26.416Z" }, + { url = "https://files.pythonhosted.org/packages/a1/0d/fdbec6629d97fd1bebed56cd742884e4eead593611bbe1abc3eb40d304b2/numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e", size = 16702689, upload-time = "2025-09-09T15:58:28.831Z" }, + { url = "https://files.pythonhosted.org/packages/9b/09/0a35196dc5575adde1eb97ddfbc3e1687a814f905377621d18ca9bc2b7dd/numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097", size = 16133855, upload-time = "2025-09-09T15:58:31.349Z" }, + { url = "https://files.pythonhosted.org/packages/7a/ca/c9de3ea397d576f1b6753eaa906d4cdef1bf97589a6d9825a349b4729cc2/numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970", size = 18652520, upload-time = "2025-09-09T15:58:33.762Z" }, + { url = "https://files.pythonhosted.org/packages/fd/c2/e5ed830e08cd0196351db55db82f65bc0ab05da6ef2b72a836dcf1936d2f/numpy-2.3.3-cp314-cp314t-win32.whl", hash = "sha256:1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5", size = 6515371, upload-time = "2025-09-09T15:58:36.04Z" }, + { url = "https://files.pythonhosted.org/packages/47/c7/b0f6b5b67f6788a0725f744496badbb604d226bf233ba716683ebb47b570/numpy-2.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f", size = 13112576, upload-time = "2025-09-09T15:58:37.927Z" }, + { url = "https://files.pythonhosted.org/packages/06/b9/33bba5ff6fb679aa0b1f8a07e853f002a6b04b9394db3069a1270a7784ca/numpy-2.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b", size = 10545953, upload-time = "2025-09-09T15:58:40.576Z" }, + { url = "https://files.pythonhosted.org/packages/b8/f2/7e0a37cfced2644c9563c529f29fa28acbd0960dde32ece683aafa6f4949/numpy-2.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1e02c7159791cd481e1e6d5ddd766b62a4d5acf8df4d4d1afe35ee9c5c33a41e", size = 21131019, upload-time = "2025-09-09T15:58:42.838Z" }, + { url = "https://files.pythonhosted.org/packages/1a/7e/3291f505297ed63831135a6cc0f474da0c868a1f31b0dd9a9f03a7a0d2ed/numpy-2.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:dca2d0fc80b3893ae72197b39f69d55a3cd8b17ea1b50aa4c62de82419936150", size = 14376288, upload-time = "2025-09-09T15:58:45.425Z" }, + { url = "https://files.pythonhosted.org/packages/bf/4b/ae02e985bdeee73d7b5abdefeb98aef1207e96d4c0621ee0cf228ddfac3c/numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:99683cbe0658f8271b333a1b1b4bb3173750ad59c0c61f5bbdc5b318918fffe3", size = 5305425, upload-time = "2025-09-09T15:58:48.6Z" }, + { url = "https://files.pythonhosted.org/packages/8b/eb/9df215d6d7250db32007941500dc51c48190be25f2401d5b2b564e467247/numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d9d537a39cc9de668e5cd0e25affb17aec17b577c6b3ae8a3d866b479fbe88d0", size = 6819053, upload-time = "2025-09-09T15:58:50.401Z" }, + { url = "https://files.pythonhosted.org/packages/57/62/208293d7d6b2a8998a4a1f23ac758648c3c32182d4ce4346062018362e29/numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8596ba2f8af5f93b01d97563832686d20206d303024777f6dfc2e7c7c3f1850e", size = 14420354, upload-time = "2025-09-09T15:58:52.704Z" }, + { url = "https://files.pythonhosted.org/packages/ed/0c/8e86e0ff7072e14a71b4c6af63175e40d1e7e933ce9b9e9f765a95b4e0c3/numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1ec5615b05369925bd1125f27df33f3b6c8bc10d788d5999ecd8769a1fa04db", size = 16760413, upload-time = "2025-09-09T15:58:55.027Z" }, + { url = "https://files.pythonhosted.org/packages/af/11/0cc63f9f321ccf63886ac203336777140011fb669e739da36d8db3c53b98/numpy-2.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2e267c7da5bf7309670523896df97f93f6e469fb931161f483cd6882b3b1a5dc", size = 12971844, upload-time = "2025-09-09T15:58:57.359Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pydantic" +version = "2.11.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, + { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, + { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, + { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, + { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, + { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, + { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, + { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, + { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, + { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, + { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, + { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, + { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, + { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, + { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, + { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, + { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, + { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, + { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, + { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, + { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, + { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, + { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, + { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, + { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, + { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, + { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", size = 172583, upload-time = "2025-06-24T13:26:46.841Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235, upload-time = "2025-06-24T13:26:45.485Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pystemmer" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/ca/959415b311e05812c1cfe926e21a6c64dd0131b076f258805891c10840e7/pystemmer-3.0.0.tar.gz", hash = "sha256:eb9f31accf188e733f0f375cad1594f7cc21ad86b20977ccce0c7162c7c6c35e", size = 303779, upload-time = "2025-05-08T03:21:40.943Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/94/317c21414b9e8cf60d55820a2a98b5aaceb598ef7bb9d9a2475d4b0e1d77/PyStemmer-3.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dabc4adba4cf3bd880d5b709b710f98c5034a76bbaa465b054e3b318d5ed544d", size = 673749, upload-time = "2025-05-08T03:22:25.653Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8c/897f7cfd81f058d57cc21e77f078a8142e0eeeb27852e12dbb88d456003d/PyStemmer-3.0.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c87366308fe7298829173833a69fbfc731f19829d4788b6cfc8cfe9eff7f7fe9", size = 709075, upload-time = "2025-05-08T03:22:27.02Z" }, + { url = "https://files.pythonhosted.org/packages/f5/16/f2ef12d8226bb34978c79b95637206d75b9b6d751305cd6fcfc39128cbfd/PyStemmer-3.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:88d56b84249057907efefd54bc7efa9b84752fcdcd00c3f8e1e73c3157ecb9d0", size = 687611, upload-time = "2025-05-08T03:22:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d6/c6e3cb2f7c1f661ed13d5e6950bb5f720020b516cc24ae10785135db642b/PyStemmer-3.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8e1edc824faf9815b51069137b4ca246f696cb62714aea6444e63b59e35d46aa", size = 720152, upload-time = "2025-05-08T03:22:30.041Z" }, + { url = "https://files.pythonhosted.org/packages/70/d3/35e933a6780026480100b01c2b371673ddad9a43b9c4d59d17597f613e47/PyStemmer-3.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ec2370337cf7c9c48ce68d926bbef252bc30b9c55b6d97124a0555da644c5e6", size = 692648, upload-time = "2025-05-08T03:22:38.34Z" }, + { url = "https://files.pythonhosted.org/packages/c6/e7/afd155679f0084a5c4f0d8e2a0a9b51a8c8e3f6ab4b1f732acb4147e0291/PyStemmer-3.0.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89a7a6acd77de08fa4b53221eb8c54c53cbc04229cca6ce48bce272b4f1ba98a", size = 731871, upload-time = "2025-05-08T03:22:39.728Z" }, + { url = "https://files.pythonhosted.org/packages/ea/3d/d5c30f4f20fb600915be78444a2295640c9ffa31d5c41ebc175a16228a81/PyStemmer-3.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e753c9f6b6927c4e874c8e5c1cd401d98b6e7f2979b5b2a540c802bfd019494", size = 701184, upload-time = "2025-05-08T03:22:41.089Z" }, + { url = "https://files.pythonhosted.org/packages/d4/ec/05d0ef2ca5c920f707390129dd143f407a07d06bfa108abe3713bcfe1733/PyStemmer-3.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c71467e9570bbe7aaf12d0f6deebb45d0a0de68b6b0d545b5a2fe935a03970ea", size = 736541, upload-time = "2025-05-08T03:22:42.403Z" }, + { url = "https://files.pythonhosted.org/packages/e2/98/c618418767da15005100ef1839151b994268a0b27eaf17ed1d455710192a/PyStemmer-3.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0497993de48ba98ce2e6b31ace4a1f5af5c24630d15b997e9be1a1049ee07be", size = 703563, upload-time = "2025-05-08T03:22:49.067Z" }, + { url = "https://files.pythonhosted.org/packages/1a/e2/b65a69b8714f658b7d50094fe88889c1cb89ca9a7b6dbc10c4fcf3a62642/PyStemmer-3.0.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a61d4b74b87a72d3a3bb1d1a28ab5fbacd2835e059049111185b3023d64bddd6", size = 745290, upload-time = "2025-05-08T03:22:51.252Z" }, + { url = "https://files.pythonhosted.org/packages/88/1e/aa0e3e1450f0efee5f5af7f1046183db1ff5f1d0873b7cec39d9339d2e99/PyStemmer-3.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e9fc61a241df3c754c16f44acf523a67c16ecf7a0af14d339649a89f5a1ed669", size = 709919, upload-time = "2025-05-08T03:22:52.546Z" }, + { url = "https://files.pythonhosted.org/packages/ef/0b/78b293b06646e480aeaa0f2c9c9ce67859e178a9a4fd52d894d9fdfdf3b1/PyStemmer-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:076099cb664d9ef245d0b2ef3378f00e01185985c79c6e6316d570a76c15fadf", size = 745226, upload-time = "2025-05-08T03:22:53.877Z" }, + { url = "https://files.pythonhosted.org/packages/67/e5/e2cfff13764429cf6f93a6f9c7deb6cea8bd2606e4eebd8037a7714fa0bd/PyStemmer-3.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2346cfda0f5490ef595b2baf0ba3dc23b80eb2d4d47e69e76071af173da0ccc3", size = 238222, upload-time = "2025-05-08T03:23:54.419Z" }, + { url = "https://files.pythonhosted.org/packages/31/e8/1382901420864a746817efa8932c327447340251921227c26242071298f1/PyStemmer-3.0.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb1a9666f1964635170d5abe44beb2d6f33894aa49b18037ac2340eb0a53e14e", size = 247100, upload-time = "2025-05-08T03:23:55.623Z" }, + { url = "https://files.pythonhosted.org/packages/9b/28/6a5ce468710fb51e1e09da7dd289e45b638f586979974266d8f3366f82dd/pystemmer-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0887a76bb1574aa615cc7a8ddbae386a6713a6e470ed6866a1cf222a1ad0aca4", size = 234868, upload-time = "2025-05-08T03:22:22.351Z" }, + { url = "https://files.pythonhosted.org/packages/a9/34/739cda0b1039edfd1c552b85b67790be38b97adeccae318351c6b87cb500/pystemmer-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:849013b9ac5739d6782df8d98cbdf3910b815386aa97a8d8bd35e379d79d9988", size = 240155, upload-time = "2025-05-08T03:22:24.117Z" }, + { url = "https://files.pythonhosted.org/packages/e0/90/c5812ef170bbf685cd111798a202b7ba4eaf4619b84d3aff382cb70a965e/pystemmer-3.0.0-cp310-cp310-win32.whl", hash = "sha256:e7b31a9f67c6b5abb1557f1d59b7f30d90743175c5185f18caf7c15e27c49734", size = 152011, upload-time = "2025-05-08T03:22:31.691Z" }, + { url = "https://files.pythonhosted.org/packages/b4/47/f460c6c58c0288ef0a6ae32ea36289434b76a79967d543b13ee9f7d7105e/pystemmer-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:78d8909d4011f5300a052653497364e199bf3ff42fd60d5495e4bfb803dc8e45", size = 201464, upload-time = "2025-05-08T03:22:32.997Z" }, + { url = "https://files.pythonhosted.org/packages/86/90/21af7ab76cb240bc87377501b5fcc4b5f883950e030a85819f3d062f3d09/pystemmer-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aaf3bf810f33ec6b2e907157a54e7d8b8179060fc02d27a9b14d7845244c434c", size = 234939, upload-time = "2025-05-08T03:22:34.893Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f8/b9cfedece2d5cf0487f5cb4403bde8181bcae95594d444b9b437d82debd3/pystemmer-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c3d908c8e8e61fb392cc34e063bd225b59f3809d3b6fdb25ffc49f652695bd46", size = 240111, upload-time = "2025-05-08T03:22:36.938Z" }, + { url = "https://files.pythonhosted.org/packages/bd/37/c5d741da5e1e246b5eadb8550395c2e22d973753a4c8588249a49046f1e2/pystemmer-3.0.0-cp311-cp311-win32.whl", hash = "sha256:8df0a5e8cfb6577ae201bd6bbd1ecee76519459064e3ff886779ca5182f9b59c", size = 151662, upload-time = "2025-05-08T03:22:43.757Z" }, + { url = "https://files.pythonhosted.org/packages/a0/11/296d2008b9885c91b969822bb6ad5b3e56a59afc0732f276d4a4930d195a/pystemmer-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:cf8717e0fa0975daaffe14cc0fa4ba1c66cc00daf7b7bb9afcdc2c6159e9f5d4", size = 201435, upload-time = "2025-05-08T03:22:44.935Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/f1f838290cb7f78725cf088c9d5a78fa729e86086f8932f61261e2b06515/pystemmer-3.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:88c3e9dddc97d6697cb086d66c8655e2ed051acacffb479cc6423e56b1413b66", size = 235481, upload-time = "2025-05-08T03:22:46.323Z" }, + { url = "https://files.pythonhosted.org/packages/91/b6/57cc12e2d2292846056020716b65a8f1dd5dc03c83cb60143cf019e94a67/pystemmer-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78555f22dd211b220c421b5edafb5f81d2ddb259c668b347aed6853da3695c6d", size = 240735, upload-time = "2025-05-08T03:22:47.589Z" }, + { url = "https://files.pythonhosted.org/packages/83/3c/1102c4894943b01e24918ee701b5584e87bf27fa850df1b08a59a8af03fe/pystemmer-3.0.0-cp312-cp312-win32.whl", hash = "sha256:f6f3db949aa4e0f97be9b800bf1c026229f373e39ebfead8975569a87af39f29", size = 152178, upload-time = "2025-05-08T03:22:55.285Z" }, + { url = "https://files.pythonhosted.org/packages/56/21/cbdef54847c05a4afddfcef76750c9d43768437af22b169be881df95c5ca/pystemmer-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:62ad69d85819082e77e720f25818a5219c5429d508ccdcf5da37405c92ca4a30", size = 201922, upload-time = "2025-05-08T03:22:58.194Z" }, + { url = "https://files.pythonhosted.org/packages/4d/50/ed9a6657aba3a16acd1959b5c917320411897f4f34435556a32050200699/pystemmer-3.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1a48f350ad92c3b6e05295ae5fcb710fdb15203b15b8590cd7fb49d604385abe", size = 235051, upload-time = "2025-05-08T03:22:59.873Z" }, + { url = "https://files.pythonhosted.org/packages/80/3c/f5065f0bedef2c3ca56adbfa02e51cab0337f9420844ddc50701163f3851/pystemmer-3.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:159e9b6b85da4accbae1223b486413e4293f7736cc9fd124c170e78ee49d10bc", size = 240131, upload-time = "2025-05-08T03:23:01.043Z" }, + { url = "https://files.pythonhosted.org/packages/fe/24/53a35cb53b0018cb60fcb5c3d1257b02c93de24d6357811152b53b684909/pystemmer-3.0.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6764805aa21d302b4471a956ea6a1df496d363dfd39bcbe28d03ca8e7d4f7fdd", size = 698877, upload-time = "2025-05-08T03:23:02.313Z" }, + { url = "https://files.pythonhosted.org/packages/75/23/349f013d45a1410030412265fa1d7f67b5b31cd0026a955b0f710bb66cab/pystemmer-3.0.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d77852e6a7729c6606da7daa914f3ab29c9d5cb4b74dc05f5e1fc0947551d62", size = 739678, upload-time = "2025-05-08T03:23:03.672Z" }, + { url = "https://files.pythonhosted.org/packages/74/77/8f68d2965faa9bacef976e476c6e8183ab015e2c147973bfb475886e99c3/pystemmer-3.0.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e311f34e666c1efe5bd23705dc95d0c832e773c00fdf3b7f065e853bc651af1a", size = 705034, upload-time = "2025-05-08T03:23:04.995Z" }, + { url = "https://files.pythonhosted.org/packages/58/6f/b6d9d3ee1ee202ca7af4396c5eba35be0e3f811802f0f41cbd310bcc3b0c/pystemmer-3.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a3cb3bf0b8b5926423d0675265058889f30c5c37edc45961e9c92622814c8c27", size = 740875, upload-time = "2025-05-08T03:23:06.297Z" }, + { url = "https://files.pythonhosted.org/packages/6f/e7/d24c9c759c89c33567791b237e23d8be306223d18808429ab27003575d29/pystemmer-3.0.0-cp313-cp313-win32.whl", hash = "sha256:9792c3bccf0782017eea951c5aac986f87f16149e9587761522da94d77ab3d79", size = 152119, upload-time = "2025-05-08T03:23:07.945Z" }, + { url = "https://files.pythonhosted.org/packages/e1/0a/23e3895714ab3adb7f96bbb05cf8e02c73d8fd31174eb6b4ef6b5469e671/pystemmer-3.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:24602d321690946dcb015aa01f6bc9e504eaafd023da23a160c9c16852098a88", size = 201773, upload-time = "2025-05-08T03:23:09.083Z" }, + { url = "https://files.pythonhosted.org/packages/dc/05/443d97600b7d6cc5e87c31783605db2400aaa4157823e4e20638a41d5e7f/pystemmer-3.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f8f6ac3d031776d4512e7feb0f4411b43dacda3de91c24c418d2cff7e4e2db82", size = 187846, upload-time = "2025-05-08T03:23:51.577Z" }, + { url = "https://files.pythonhosted.org/packages/67/9d/e61425f8221913f21b0045a8a974c932267f407527202e21a51df0db7609/pystemmer-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:67e4d3f04829e672399542476163ed2e9067d56159d4a65d87d6839aa0b4fd25", size = 190857, upload-time = "2025-05-08T03:23:53.214Z" }, + { url = "https://files.pythonhosted.org/packages/2e/7e/b47f2a9402a56d4f4d026f570e8f986e805c60bbd7103ccaba89160fa5cc/pystemmer-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:753dde65d031a9181d3d1def6c69cf29f90d749f60dbe257360a2bf52d06870d", size = 196810, upload-time = "2025-05-08T03:23:56.863Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/86/9e3c5f48f7b7b638b216e4b9e645f54d199d7abbbab7a64a13b4e12ba10f/pytest_asyncio-1.2.0.tar.gz", hash = "sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57", size = 50119, upload-time = "2025-09-12T07:33:53.816Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432, upload-time = "2025-07-14T20:13:05.9Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103, upload-time = "2025-07-14T20:13:07.698Z" }, + { url = "https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557, upload-time = "2025-07-14T20:13:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.27.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479, upload-time = "2025-08-27T12:16:36.024Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/ed/3aef893e2dd30e77e35d20d4ddb45ca459db59cead748cad9796ad479411/rpds_py-0.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:68afeec26d42ab3b47e541b272166a0b4400313946871cba3ed3a4fc0cab1cef", size = 371606, upload-time = "2025-08-27T12:12:25.189Z" }, + { url = "https://files.pythonhosted.org/packages/6d/82/9818b443e5d3eb4c83c3994561387f116aae9833b35c484474769c4a8faf/rpds_py-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74e5b2f7bb6fa38b1b10546d27acbacf2a022a8b5543efb06cfebc72a59c85be", size = 353452, upload-time = "2025-08-27T12:12:27.433Z" }, + { url = "https://files.pythonhosted.org/packages/99/c7/d2a110ffaaa397fc6793a83c7bd3545d9ab22658b7cdff05a24a4535cc45/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9024de74731df54546fab0bfbcdb49fae19159ecaecfc8f37c18d2c7e2c0bd61", size = 381519, upload-time = "2025-08-27T12:12:28.719Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bc/e89581d1f9d1be7d0247eaef602566869fdc0d084008ba139e27e775366c/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31d3ebadefcd73b73928ed0b2fd696f7fefda8629229f81929ac9c1854d0cffb", size = 394424, upload-time = "2025-08-27T12:12:30.207Z" }, + { url = "https://files.pythonhosted.org/packages/ac/2e/36a6861f797530e74bb6ed53495f8741f1ef95939eed01d761e73d559067/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2e7f8f169d775dd9092a1743768d771f1d1300453ddfe6325ae3ab5332b4657", size = 523467, upload-time = "2025-08-27T12:12:31.808Z" }, + { url = "https://files.pythonhosted.org/packages/c4/59/c1bc2be32564fa499f988f0a5c6505c2f4746ef96e58e4d7de5cf923d77e/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d905d16f77eb6ab2e324e09bfa277b4c8e5e6b8a78a3e7ff8f3cdf773b4c013", size = 402660, upload-time = "2025-08-27T12:12:33.444Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ec/ef8bf895f0628dd0a59e54d81caed6891663cb9c54a0f4bb7da918cb88cf/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50c946f048209e6362e22576baea09193809f87687a95a8db24e5fbdb307b93a", size = 384062, upload-time = "2025-08-27T12:12:34.857Z" }, + { url = "https://files.pythonhosted.org/packages/69/f7/f47ff154be8d9a5e691c083a920bba89cef88d5247c241c10b9898f595a1/rpds_py-0.27.1-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:3deab27804d65cd8289eb814c2c0e807c4b9d9916c9225e363cb0cf875eb67c1", size = 401289, upload-time = "2025-08-27T12:12:36.085Z" }, + { url = "https://files.pythonhosted.org/packages/3b/d9/ca410363efd0615814ae579f6829cafb39225cd63e5ea5ed1404cb345293/rpds_py-0.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b61097f7488de4be8244c89915da8ed212832ccf1e7c7753a25a394bf9b1f10", size = 417718, upload-time = "2025-08-27T12:12:37.401Z" }, + { url = "https://files.pythonhosted.org/packages/e3/a0/8cb5c2ff38340f221cc067cc093d1270e10658ba4e8d263df923daa18e86/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a3f29aba6e2d7d90528d3c792555a93497fe6538aa65eb675b44505be747808", size = 558333, upload-time = "2025-08-27T12:12:38.672Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8c/1b0de79177c5d5103843774ce12b84caa7164dfc6cd66378768d37db11bf/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd6cd0485b7d347304067153a6dc1d73f7d4fd995a396ef32a24d24b8ac63ac8", size = 589127, upload-time = "2025-08-27T12:12:41.48Z" }, + { url = "https://files.pythonhosted.org/packages/c8/5e/26abb098d5e01266b0f3a2488d299d19ccc26849735d9d2b95c39397e945/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f4461bf931108c9fa226ffb0e257c1b18dc2d44cd72b125bec50ee0ab1248a9", size = 554899, upload-time = "2025-08-27T12:12:42.925Z" }, + { url = "https://files.pythonhosted.org/packages/de/41/905cc90ced13550db017f8f20c6d8e8470066c5738ba480d7ba63e3d136b/rpds_py-0.27.1-cp310-cp310-win32.whl", hash = "sha256:ee5422d7fb21f6a00c1901bf6559c49fee13a5159d0288320737bbf6585bd3e4", size = 217450, upload-time = "2025-08-27T12:12:44.813Z" }, + { url = "https://files.pythonhosted.org/packages/75/3d/6bef47b0e253616ccdf67c283e25f2d16e18ccddd38f92af81d5a3420206/rpds_py-0.27.1-cp310-cp310-win_amd64.whl", hash = "sha256:3e039aabf6d5f83c745d5f9a0a381d031e9ed871967c0a5c38d201aca41f3ba1", size = 228447, upload-time = "2025-08-27T12:12:46.204Z" }, + { url = "https://files.pythonhosted.org/packages/b5/c1/7907329fbef97cbd49db6f7303893bd1dd5a4a3eae415839ffdfb0762cae/rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881", size = 371063, upload-time = "2025-08-27T12:12:47.856Z" }, + { url = "https://files.pythonhosted.org/packages/11/94/2aab4bc86228bcf7c48760990273653a4900de89c7537ffe1b0d6097ed39/rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5", size = 353210, upload-time = "2025-08-27T12:12:49.187Z" }, + { url = "https://files.pythonhosted.org/packages/3a/57/f5eb3ecf434342f4f1a46009530e93fd201a0b5b83379034ebdb1d7c1a58/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e", size = 381636, upload-time = "2025-08-27T12:12:50.492Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f4/ef95c5945e2ceb5119571b184dd5a1cc4b8541bbdf67461998cfeac9cb1e/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c", size = 394341, upload-time = "2025-08-27T12:12:52.024Z" }, + { url = "https://files.pythonhosted.org/packages/5a/7e/4bd610754bf492d398b61725eb9598ddd5eb86b07d7d9483dbcd810e20bc/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195", size = 523428, upload-time = "2025-08-27T12:12:53.779Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e5/059b9f65a8c9149361a8b75094864ab83b94718344db511fd6117936ed2a/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52", size = 402923, upload-time = "2025-08-27T12:12:55.15Z" }, + { url = "https://files.pythonhosted.org/packages/f5/48/64cabb7daced2968dd08e8a1b7988bf358d7bd5bcd5dc89a652f4668543c/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed", size = 384094, upload-time = "2025-08-27T12:12:57.194Z" }, + { url = "https://files.pythonhosted.org/packages/ae/e1/dc9094d6ff566bff87add8a510c89b9e158ad2ecd97ee26e677da29a9e1b/rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a", size = 401093, upload-time = "2025-08-27T12:12:58.985Z" }, + { url = "https://files.pythonhosted.org/packages/37/8e/ac8577e3ecdd5593e283d46907d7011618994e1d7ab992711ae0f78b9937/rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde", size = 417969, upload-time = "2025-08-27T12:13:00.367Z" }, + { url = "https://files.pythonhosted.org/packages/66/6d/87507430a8f74a93556fe55c6485ba9c259949a853ce407b1e23fea5ba31/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21", size = 558302, upload-time = "2025-08-27T12:13:01.737Z" }, + { url = "https://files.pythonhosted.org/packages/3a/bb/1db4781ce1dda3eecc735e3152659a27b90a02ca62bfeea17aee45cc0fbc/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9", size = 589259, upload-time = "2025-08-27T12:13:03.127Z" }, + { url = "https://files.pythonhosted.org/packages/7b/0e/ae1c8943d11a814d01b482e1f8da903f88047a962dff9bbdadf3bd6e6fd1/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948", size = 554983, upload-time = "2025-08-27T12:13:04.516Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d5/0b2a55415931db4f112bdab072443ff76131b5ac4f4dc98d10d2d357eb03/rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39", size = 217154, upload-time = "2025-08-27T12:13:06.278Z" }, + { url = "https://files.pythonhosted.org/packages/24/75/3b7ffe0d50dc86a6a964af0d1cc3a4a2cdf437cb7b099a4747bbb96d1819/rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15", size = 228627, upload-time = "2025-08-27T12:13:07.625Z" }, + { url = "https://files.pythonhosted.org/packages/8d/3f/4fd04c32abc02c710f09a72a30c9a55ea3cc154ef8099078fd50a0596f8e/rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746", size = 220998, upload-time = "2025-08-27T12:13:08.972Z" }, + { url = "https://files.pythonhosted.org/packages/bd/fe/38de28dee5df58b8198c743fe2bea0c785c6d40941b9950bac4cdb71a014/rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90", size = 361887, upload-time = "2025-08-27T12:13:10.233Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/4b6c7eedc7dd90986bf0fab6ea2a091ec11c01b15f8ba0a14d3f80450468/rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5", size = 345795, upload-time = "2025-08-27T12:13:11.65Z" }, + { url = "https://files.pythonhosted.org/packages/6f/0e/e650e1b81922847a09cca820237b0edee69416a01268b7754d506ade11ad/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e", size = 385121, upload-time = "2025-08-27T12:13:13.008Z" }, + { url = "https://files.pythonhosted.org/packages/1b/ea/b306067a712988e2bff00dcc7c8f31d26c29b6d5931b461aa4b60a013e33/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881", size = 398976, upload-time = "2025-08-27T12:13:14.368Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0a/26dc43c8840cb8fe239fe12dbc8d8de40f2365e838f3d395835dde72f0e5/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec", size = 525953, upload-time = "2025-08-27T12:13:15.774Z" }, + { url = "https://files.pythonhosted.org/packages/22/14/c85e8127b573aaf3a0cbd7fbb8c9c99e735a4a02180c84da2a463b766e9e/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb", size = 407915, upload-time = "2025-08-27T12:13:17.379Z" }, + { url = "https://files.pythonhosted.org/packages/ed/7b/8f4fee9ba1fb5ec856eb22d725a4efa3deb47f769597c809e03578b0f9d9/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5", size = 386883, upload-time = "2025-08-27T12:13:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/86/47/28fa6d60f8b74fcdceba81b272f8d9836ac0340570f68f5df6b41838547b/rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a", size = 405699, upload-time = "2025-08-27T12:13:20.089Z" }, + { url = "https://files.pythonhosted.org/packages/d0/fd/c5987b5e054548df56953a21fe2ebed51fc1ec7c8f24fd41c067b68c4a0a/rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444", size = 423713, upload-time = "2025-08-27T12:13:21.436Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ba/3c4978b54a73ed19a7d74531be37a8bcc542d917c770e14d372b8daea186/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a", size = 562324, upload-time = "2025-08-27T12:13:22.789Z" }, + { url = "https://files.pythonhosted.org/packages/b5/6c/6943a91768fec16db09a42b08644b960cff540c66aab89b74be6d4a144ba/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1", size = 593646, upload-time = "2025-08-27T12:13:24.122Z" }, + { url = "https://files.pythonhosted.org/packages/11/73/9d7a8f4be5f4396f011a6bb7a19fe26303a0dac9064462f5651ced2f572f/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998", size = 558137, upload-time = "2025-08-27T12:13:25.557Z" }, + { url = "https://files.pythonhosted.org/packages/6e/96/6772cbfa0e2485bcceef8071de7821f81aeac8bb45fbfd5542a3e8108165/rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39", size = 221343, upload-time = "2025-08-27T12:13:26.967Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/c82f0faa9af1c6a64669f73a17ee0eeef25aff30bb9a1c318509efe45d84/rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594", size = 232497, upload-time = "2025-08-27T12:13:28.326Z" }, + { url = "https://files.pythonhosted.org/packages/e1/96/2817b44bd2ed11aebacc9251da03689d56109b9aba5e311297b6902136e2/rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502", size = 222790, upload-time = "2025-08-27T12:13:29.71Z" }, + { url = "https://files.pythonhosted.org/packages/cc/77/610aeee8d41e39080c7e14afa5387138e3c9fa9756ab893d09d99e7d8e98/rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b", size = 361741, upload-time = "2025-08-27T12:13:31.039Z" }, + { url = "https://files.pythonhosted.org/packages/3a/fc/c43765f201c6a1c60be2043cbdb664013def52460a4c7adace89d6682bf4/rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf", size = 345574, upload-time = "2025-08-27T12:13:32.902Z" }, + { url = "https://files.pythonhosted.org/packages/20/42/ee2b2ca114294cd9847d0ef9c26d2b0851b2e7e00bf14cc4c0b581df0fc3/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83", size = 385051, upload-time = "2025-08-27T12:13:34.228Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e8/1e430fe311e4799e02e2d1af7c765f024e95e17d651612425b226705f910/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf", size = 398395, upload-time = "2025-08-27T12:13:36.132Z" }, + { url = "https://files.pythonhosted.org/packages/82/95/9dc227d441ff2670651c27a739acb2535ccaf8b351a88d78c088965e5996/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2", size = 524334, upload-time = "2025-08-27T12:13:37.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/01/a670c232f401d9ad461d9a332aa4080cd3cb1d1df18213dbd0d2a6a7ab51/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0", size = 407691, upload-time = "2025-08-27T12:13:38.94Z" }, + { url = "https://files.pythonhosted.org/packages/03/36/0a14aebbaa26fe7fab4780c76f2239e76cc95a0090bdb25e31d95c492fcd/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418", size = 386868, upload-time = "2025-08-27T12:13:40.192Z" }, + { url = "https://files.pythonhosted.org/packages/3b/03/8c897fb8b5347ff6c1cc31239b9611c5bf79d78c984430887a353e1409a1/rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d", size = 405469, upload-time = "2025-08-27T12:13:41.496Z" }, + { url = "https://files.pythonhosted.org/packages/da/07/88c60edc2df74850d496d78a1fdcdc7b54360a7f610a4d50008309d41b94/rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274", size = 422125, upload-time = "2025-08-27T12:13:42.802Z" }, + { url = "https://files.pythonhosted.org/packages/6b/86/5f4c707603e41b05f191a749984f390dabcbc467cf833769b47bf14ba04f/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd", size = 562341, upload-time = "2025-08-27T12:13:44.472Z" }, + { url = "https://files.pythonhosted.org/packages/b2/92/3c0cb2492094e3cd9baf9e49bbb7befeceb584ea0c1a8b5939dca4da12e5/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2", size = 592511, upload-time = "2025-08-27T12:13:45.898Z" }, + { url = "https://files.pythonhosted.org/packages/10/bb/82e64fbb0047c46a168faa28d0d45a7851cd0582f850b966811d30f67ad8/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002", size = 557736, upload-time = "2025-08-27T12:13:47.408Z" }, + { url = "https://files.pythonhosted.org/packages/00/95/3c863973d409210da7fb41958172c6b7dbe7fc34e04d3cc1f10bb85e979f/rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3", size = 221462, upload-time = "2025-08-27T12:13:48.742Z" }, + { url = "https://files.pythonhosted.org/packages/ce/2c/5867b14a81dc217b56d95a9f2a40fdbc56a1ab0181b80132beeecbd4b2d6/rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83", size = 232034, upload-time = "2025-08-27T12:13:50.11Z" }, + { url = "https://files.pythonhosted.org/packages/c7/78/3958f3f018c01923823f1e47f1cc338e398814b92d83cd278364446fac66/rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d", size = 222392, upload-time = "2025-08-27T12:13:52.587Z" }, + { url = "https://files.pythonhosted.org/packages/01/76/1cdf1f91aed5c3a7bf2eba1f1c4e4d6f57832d73003919a20118870ea659/rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228", size = 358355, upload-time = "2025-08-27T12:13:54.012Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6f/bf142541229374287604caf3bb2a4ae17f0a580798fd72d3b009b532db4e/rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92", size = 342138, upload-time = "2025-08-27T12:13:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/1a/77/355b1c041d6be40886c44ff5e798b4e2769e497b790f0f7fd1e78d17e9a8/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2", size = 380247, upload-time = "2025-08-27T12:13:57.683Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a4/d9cef5c3946ea271ce2243c51481971cd6e34f21925af2783dd17b26e815/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723", size = 390699, upload-time = "2025-08-27T12:13:59.137Z" }, + { url = "https://files.pythonhosted.org/packages/3a/06/005106a7b8c6c1a7e91b73169e49870f4af5256119d34a361ae5240a0c1d/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802", size = 521852, upload-time = "2025-08-27T12:14:00.583Z" }, + { url = "https://files.pythonhosted.org/packages/e5/3e/50fb1dac0948e17a02eb05c24510a8fe12d5ce8561c6b7b7d1339ab7ab9c/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f", size = 402582, upload-time = "2025-08-27T12:14:02.034Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b0/f4e224090dc5b0ec15f31a02d746ab24101dd430847c4d99123798661bfc/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2", size = 384126, upload-time = "2025-08-27T12:14:03.437Z" }, + { url = "https://files.pythonhosted.org/packages/54/77/ac339d5f82b6afff1df8f0fe0d2145cc827992cb5f8eeb90fc9f31ef7a63/rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21", size = 399486, upload-time = "2025-08-27T12:14:05.443Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/3e1c255eee6ac358c056a57d6d6869baa00a62fa32eea5ee0632039c50a3/rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef", size = 414832, upload-time = "2025-08-27T12:14:06.902Z" }, + { url = "https://files.pythonhosted.org/packages/3f/db/6d498b844342deb3fa1d030598db93937a9964fcf5cb4da4feb5f17be34b/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081", size = 557249, upload-time = "2025-08-27T12:14:08.37Z" }, + { url = "https://files.pythonhosted.org/packages/60/f3/690dd38e2310b6f68858a331399b4d6dbb9132c3e8ef8b4333b96caf403d/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd", size = 587356, upload-time = "2025-08-27T12:14:10.034Z" }, + { url = "https://files.pythonhosted.org/packages/86/e3/84507781cccd0145f35b1dc32c72675200c5ce8d5b30f813e49424ef68fc/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7", size = 555300, upload-time = "2025-08-27T12:14:11.783Z" }, + { url = "https://files.pythonhosted.org/packages/e5/ee/375469849e6b429b3516206b4580a79e9ef3eb12920ddbd4492b56eaacbe/rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688", size = 216714, upload-time = "2025-08-27T12:14:13.629Z" }, + { url = "https://files.pythonhosted.org/packages/21/87/3fc94e47c9bd0742660e84706c311a860dcae4374cf4a03c477e23ce605a/rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797", size = 228943, upload-time = "2025-08-27T12:14:14.937Z" }, + { url = "https://files.pythonhosted.org/packages/70/36/b6e6066520a07cf029d385de869729a895917b411e777ab1cde878100a1d/rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334", size = 362472, upload-time = "2025-08-27T12:14:16.333Z" }, + { url = "https://files.pythonhosted.org/packages/af/07/b4646032e0dcec0df9c73a3bd52f63bc6c5f9cda992f06bd0e73fe3fbebd/rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33", size = 345676, upload-time = "2025-08-27T12:14:17.764Z" }, + { url = "https://files.pythonhosted.org/packages/b0/16/2f1003ee5d0af4bcb13c0cf894957984c32a6751ed7206db2aee7379a55e/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a", size = 385313, upload-time = "2025-08-27T12:14:19.829Z" }, + { url = "https://files.pythonhosted.org/packages/05/cd/7eb6dd7b232e7f2654d03fa07f1414d7dfc980e82ba71e40a7c46fd95484/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b", size = 399080, upload-time = "2025-08-27T12:14:21.531Z" }, + { url = "https://files.pythonhosted.org/packages/20/51/5829afd5000ec1cb60f304711f02572d619040aa3ec033d8226817d1e571/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7", size = 523868, upload-time = "2025-08-27T12:14:23.485Z" }, + { url = "https://files.pythonhosted.org/packages/05/2c/30eebca20d5db95720ab4d2faec1b5e4c1025c473f703738c371241476a2/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136", size = 408750, upload-time = "2025-08-27T12:14:24.924Z" }, + { url = "https://files.pythonhosted.org/packages/90/1a/cdb5083f043597c4d4276eae4e4c70c55ab5accec078da8611f24575a367/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff", size = 387688, upload-time = "2025-08-27T12:14:27.537Z" }, + { url = "https://files.pythonhosted.org/packages/7c/92/cf786a15320e173f945d205ab31585cc43969743bb1a48b6888f7a2b0a2d/rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9", size = 407225, upload-time = "2025-08-27T12:14:28.981Z" }, + { url = "https://files.pythonhosted.org/packages/33/5c/85ee16df5b65063ef26017bef33096557a4c83fbe56218ac7cd8c235f16d/rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60", size = 423361, upload-time = "2025-08-27T12:14:30.469Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8e/1c2741307fcabd1a334ecf008e92c4f47bb6f848712cf15c923becfe82bb/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e", size = 562493, upload-time = "2025-08-27T12:14:31.987Z" }, + { url = "https://files.pythonhosted.org/packages/04/03/5159321baae9b2222442a70c1f988cbbd66b9be0675dd3936461269be360/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212", size = 592623, upload-time = "2025-08-27T12:14:33.543Z" }, + { url = "https://files.pythonhosted.org/packages/ff/39/c09fd1ad28b85bc1d4554a8710233c9f4cefd03d7717a1b8fbfd171d1167/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675", size = 558800, upload-time = "2025-08-27T12:14:35.436Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d6/99228e6bbcf4baa764b18258f519a9035131d91b538d4e0e294313462a98/rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3", size = 221943, upload-time = "2025-08-27T12:14:36.898Z" }, + { url = "https://files.pythonhosted.org/packages/be/07/c802bc6b8e95be83b79bdf23d1aa61d68324cb1006e245d6c58e959e314d/rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash = "sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456", size = 233739, upload-time = "2025-08-27T12:14:38.386Z" }, + { url = "https://files.pythonhosted.org/packages/c8/89/3e1b1c16d4c2d547c5717377a8df99aee8099ff050f87c45cb4d5fa70891/rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3", size = 223120, upload-time = "2025-08-27T12:14:39.82Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/dc7931dc2fa4a6e46b2a4fa744a9fe5c548efd70e0ba74f40b39fa4a8c10/rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2", size = 358944, upload-time = "2025-08-27T12:14:41.199Z" }, + { url = "https://files.pythonhosted.org/packages/e6/22/4af76ac4e9f336bfb1a5f240d18a33c6b2fcaadb7472ac7680576512b49a/rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4", size = 342283, upload-time = "2025-08-27T12:14:42.699Z" }, + { url = "https://files.pythonhosted.org/packages/1c/15/2a7c619b3c2272ea9feb9ade67a45c40b3eeb500d503ad4c28c395dc51b4/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e", size = 380320, upload-time = "2025-08-27T12:14:44.157Z" }, + { url = "https://files.pythonhosted.org/packages/a2/7d/4c6d243ba4a3057e994bb5bedd01b5c963c12fe38dde707a52acdb3849e7/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817", size = 391760, upload-time = "2025-08-27T12:14:45.845Z" }, + { url = "https://files.pythonhosted.org/packages/b4/71/b19401a909b83bcd67f90221330bc1ef11bc486fe4e04c24388d28a618ae/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec", size = 522476, upload-time = "2025-08-27T12:14:47.364Z" }, + { url = "https://files.pythonhosted.org/packages/e4/44/1a3b9715c0455d2e2f0f6df5ee6d6f5afdc423d0773a8a682ed2b43c566c/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a", size = 403418, upload-time = "2025-08-27T12:14:49.991Z" }, + { url = "https://files.pythonhosted.org/packages/1c/4b/fb6c4f14984eb56673bc868a66536f53417ddb13ed44b391998100a06a96/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8", size = 384771, upload-time = "2025-08-27T12:14:52.159Z" }, + { url = "https://files.pythonhosted.org/packages/c0/56/d5265d2d28b7420d7b4d4d85cad8ef891760f5135102e60d5c970b976e41/rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48", size = 400022, upload-time = "2025-08-27T12:14:53.859Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e9/9f5fc70164a569bdd6ed9046486c3568d6926e3a49bdefeeccfb18655875/rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb", size = 416787, upload-time = "2025-08-27T12:14:55.673Z" }, + { url = "https://files.pythonhosted.org/packages/d4/64/56dd03430ba491db943a81dcdef115a985aac5f44f565cd39a00c766d45c/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734", size = 557538, upload-time = "2025-08-27T12:14:57.245Z" }, + { url = "https://files.pythonhosted.org/packages/3f/36/92cc885a3129993b1d963a2a42ecf64e6a8e129d2c7cc980dbeba84e55fb/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb", size = 588512, upload-time = "2025-08-27T12:14:58.728Z" }, + { url = "https://files.pythonhosted.org/packages/dd/10/6b283707780a81919f71625351182b4f98932ac89a09023cb61865136244/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0", size = 555813, upload-time = "2025-08-27T12:15:00.334Z" }, + { url = "https://files.pythonhosted.org/packages/04/2e/30b5ea18c01379da6272a92825dd7e53dc9d15c88a19e97932d35d430ef7/rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a", size = 217385, upload-time = "2025-08-27T12:15:01.937Z" }, + { url = "https://files.pythonhosted.org/packages/32/7d/97119da51cb1dd3f2f3c0805f155a3aa4a95fa44fe7d78ae15e69edf4f34/rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772", size = 230097, upload-time = "2025-08-27T12:15:03.961Z" }, + { url = "https://files.pythonhosted.org/packages/d5/63/b7cc415c345625d5e62f694ea356c58fb964861409008118f1245f8c3347/rpds_py-0.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7ba22cb9693df986033b91ae1d7a979bc399237d45fccf875b76f62bb9e52ddf", size = 371360, upload-time = "2025-08-27T12:15:29.218Z" }, + { url = "https://files.pythonhosted.org/packages/e5/8c/12e1b24b560cf378b8ffbdb9dc73abd529e1adcfcf82727dfd29c4a7b88d/rpds_py-0.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b640501be9288c77738b5492b3fd3abc4ba95c50c2e41273c8a1459f08298d3", size = 353933, upload-time = "2025-08-27T12:15:30.837Z" }, + { url = "https://files.pythonhosted.org/packages/9b/85/1bb2210c1f7a1b99e91fea486b9f0f894aa5da3a5ec7097cbad7dec6d40f/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb08b65b93e0c6dd70aac7f7890a9c0938d5ec71d5cb32d45cf844fb8ae47636", size = 382962, upload-time = "2025-08-27T12:15:32.348Z" }, + { url = "https://files.pythonhosted.org/packages/cc/c9/a839b9f219cf80ed65f27a7f5ddbb2809c1b85c966020ae2dff490e0b18e/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7ff07d696a7a38152ebdb8212ca9e5baab56656749f3d6004b34ab726b550b8", size = 394412, upload-time = "2025-08-27T12:15:33.839Z" }, + { url = "https://files.pythonhosted.org/packages/02/2d/b1d7f928b0b1f4fc2e0133e8051d199b01d7384875adc63b6ddadf3de7e5/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb7c72262deae25366e3b6c0c0ba46007967aea15d1eea746e44ddba8ec58dcc", size = 523972, upload-time = "2025-08-27T12:15:35.377Z" }, + { url = "https://files.pythonhosted.org/packages/a9/af/2cbf56edd2d07716df1aec8a726b3159deb47cb5c27e1e42b71d705a7c2f/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b002cab05d6339716b03a4a3a2ce26737f6231d7b523f339fa061d53368c9d8", size = 403273, upload-time = "2025-08-27T12:15:37.051Z" }, + { url = "https://files.pythonhosted.org/packages/c0/93/425e32200158d44ff01da5d9612c3b6711fe69f606f06e3895511f17473b/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23f6b69d1c26c4704fec01311963a41d7de3ee0570a84ebde4d544e5a1859ffc", size = 385278, upload-time = "2025-08-27T12:15:38.571Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1a/1a04a915ecd0551bfa9e77b7672d1937b4b72a0fc204a17deef76001cfb2/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:530064db9146b247351f2a0250b8f00b289accea4596a033e94be2389977de71", size = 402084, upload-time = "2025-08-27T12:15:40.529Z" }, + { url = "https://files.pythonhosted.org/packages/51/f7/66585c0fe5714368b62951d2513b684e5215beaceab2c6629549ddb15036/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b90b0496570bd6b0321724a330d8b545827c4df2034b6ddfc5f5275f55da2ad", size = 419041, upload-time = "2025-08-27T12:15:42.191Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7e/83a508f6b8e219bba2d4af077c35ba0e0cdd35a751a3be6a7cba5a55ad71/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879b0e14a2da6a1102a3fc8af580fc1ead37e6d6692a781bd8c83da37429b5ab", size = 560084, upload-time = "2025-08-27T12:15:43.839Z" }, + { url = "https://files.pythonhosted.org/packages/66/66/bb945683b958a1b19eb0fe715594630d0f36396ebdef4d9b89c2fa09aa56/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:0d807710df3b5faa66c731afa162ea29717ab3be17bdc15f90f2d9f183da4059", size = 590115, upload-time = "2025-08-27T12:15:46.647Z" }, + { url = "https://files.pythonhosted.org/packages/12/00/ccfaafaf7db7e7adace915e5c2f2c2410e16402561801e9c7f96683002d3/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3adc388fc3afb6540aec081fa59e6e0d3908722771aa1e37ffe22b220a436f0b", size = 556561, upload-time = "2025-08-27T12:15:48.219Z" }, + { url = "https://files.pythonhosted.org/packages/e1/b7/92b6ed9aad103bfe1c45df98453dfae40969eef2cb6c6239c58d7e96f1b3/rpds_py-0.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c796c0c1cc68cb08b0284db4229f5af76168172670c74908fdbd4b7d7f515819", size = 229125, upload-time = "2025-08-27T12:15:49.956Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ed/e1fba02de17f4f76318b834425257c8ea297e415e12c68b4361f63e8ae92/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df", size = 371402, upload-time = "2025-08-27T12:15:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/af/7c/e16b959b316048b55585a697e94add55a4ae0d984434d279ea83442e460d/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3", size = 354084, upload-time = "2025-08-27T12:15:53.219Z" }, + { url = "https://files.pythonhosted.org/packages/de/c1/ade645f55de76799fdd08682d51ae6724cb46f318573f18be49b1e040428/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9", size = 383090, upload-time = "2025-08-27T12:15:55.158Z" }, + { url = "https://files.pythonhosted.org/packages/1f/27/89070ca9b856e52960da1472efcb6c20ba27cfe902f4f23ed095b9cfc61d/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc", size = 394519, upload-time = "2025-08-27T12:15:57.238Z" }, + { url = "https://files.pythonhosted.org/packages/b3/28/be120586874ef906aa5aeeae95ae8df4184bc757e5b6bd1c729ccff45ed5/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4", size = 523817, upload-time = "2025-08-27T12:15:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/70cc197bc11cfcde02a86f36ac1eed15c56667c2ebddbdb76a47e90306da/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66", size = 403240, upload-time = "2025-08-27T12:16:00.923Z" }, + { url = "https://files.pythonhosted.org/packages/cf/35/46936cca449f7f518f2f4996e0e8344db4b57e2081e752441154089d2a5f/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e", size = 385194, upload-time = "2025-08-27T12:16:02.802Z" }, + { url = "https://files.pythonhosted.org/packages/e1/62/29c0d3e5125c3270b51415af7cbff1ec587379c84f55a5761cc9efa8cd06/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c", size = 402086, upload-time = "2025-08-27T12:16:04.806Z" }, + { url = "https://files.pythonhosted.org/packages/8f/66/03e1087679227785474466fdd04157fb793b3b76e3fcf01cbf4c693c1949/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf", size = 419272, upload-time = "2025-08-27T12:16:06.471Z" }, + { url = "https://files.pythonhosted.org/packages/6a/24/e3e72d265121e00b063aef3e3501e5b2473cf1b23511d56e529531acf01e/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf", size = 560003, upload-time = "2025-08-27T12:16:08.06Z" }, + { url = "https://files.pythonhosted.org/packages/26/ca/f5a344c534214cc2d41118c0699fffbdc2c1bc7046f2a2b9609765ab9c92/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6", size = 590482, upload-time = "2025-08-27T12:16:10.137Z" }, + { url = "https://files.pythonhosted.org/packages/ce/08/4349bdd5c64d9d193c360aa9db89adeee6f6682ab8825dca0a3f535f434f/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a", size = 556523, upload-time = "2025-08-27T12:16:12.188Z" }, +] + +[[package]] +name = "scipy" +version = "1.15.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/37/6964b830433e654ec7485e45a00fc9a27cf868d622838f6b6d9c5ec0d532/scipy-1.15.3.tar.gz", hash = "sha256:eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf", size = 59419214, upload-time = "2025-05-08T16:13:05.955Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/2f/4966032c5f8cc7e6a60f1b2e0ad686293b9474b65246b0c642e3ef3badd0/scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a345928c86d535060c9c2b25e71e87c39ab2f22fc96e9636bd74d1dbf9de448c", size = 38702770, upload-time = "2025-05-08T16:04:20.849Z" }, + { url = "https://files.pythonhosted.org/packages/a0/6e/0c3bf90fae0e910c274db43304ebe25a6b391327f3f10b5dcc638c090795/scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:ad3432cb0f9ed87477a8d97f03b763fd1d57709f1bbde3c9369b1dff5503b253", size = 30094511, upload-time = "2025-05-08T16:04:27.103Z" }, + { url = "https://files.pythonhosted.org/packages/ea/b1/4deb37252311c1acff7f101f6453f0440794f51b6eacb1aad4459a134081/scipy-1.15.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:aef683a9ae6eb00728a542b796f52a5477b78252edede72b8327a886ab63293f", size = 22368151, upload-time = "2025-05-08T16:04:31.731Z" }, + { url = "https://files.pythonhosted.org/packages/38/7d/f457626e3cd3c29b3a49ca115a304cebb8cc6f31b04678f03b216899d3c6/scipy-1.15.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:1c832e1bd78dea67d5c16f786681b28dd695a8cb1fb90af2e27580d3d0967e92", size = 25121732, upload-time = "2025-05-08T16:04:36.596Z" }, + { url = "https://files.pythonhosted.org/packages/db/0a/92b1de4a7adc7a15dcf5bddc6e191f6f29ee663b30511ce20467ef9b82e4/scipy-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:263961f658ce2165bbd7b99fa5135195c3a12d9bef045345016b8b50c315cb82", size = 35547617, upload-time = "2025-05-08T16:04:43.546Z" }, + { url = "https://files.pythonhosted.org/packages/8e/6d/41991e503e51fc1134502694c5fa7a1671501a17ffa12716a4a9151af3df/scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2abc762b0811e09a0d3258abee2d98e0c703eee49464ce0069590846f31d40", size = 37662964, upload-time = "2025-05-08T16:04:49.431Z" }, + { url = "https://files.pythonhosted.org/packages/25/e1/3df8f83cb15f3500478c889be8fb18700813b95e9e087328230b98d547ff/scipy-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ed7284b21a7a0c8f1b6e5977ac05396c0d008b89e05498c8b7e8f4a1423bba0e", size = 37238749, upload-time = "2025-05-08T16:04:55.215Z" }, + { url = "https://files.pythonhosted.org/packages/93/3e/b3257cf446f2a3533ed7809757039016b74cd6f38271de91682aa844cfc5/scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5380741e53df2c566f4d234b100a484b420af85deb39ea35a1cc1be84ff53a5c", size = 40022383, upload-time = "2025-05-08T16:05:01.914Z" }, + { url = "https://files.pythonhosted.org/packages/d1/84/55bc4881973d3f79b479a5a2e2df61c8c9a04fcb986a213ac9c02cfb659b/scipy-1.15.3-cp310-cp310-win_amd64.whl", hash = "sha256:9d61e97b186a57350f6d6fd72640f9e99d5a4a2b8fbf4b9ee9a841eab327dc13", size = 41259201, upload-time = "2025-05-08T16:05:08.166Z" }, + { url = "https://files.pythonhosted.org/packages/96/ab/5cc9f80f28f6a7dff646c5756e559823614a42b1939d86dd0ed550470210/scipy-1.15.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:993439ce220d25e3696d1b23b233dd010169b62f6456488567e830654ee37a6b", size = 38714255, upload-time = "2025-05-08T16:05:14.596Z" }, + { url = "https://files.pythonhosted.org/packages/4a/4a/66ba30abe5ad1a3ad15bfb0b59d22174012e8056ff448cb1644deccbfed2/scipy-1.15.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:34716e281f181a02341ddeaad584205bd2fd3c242063bd3423d61ac259ca7eba", size = 30111035, upload-time = "2025-05-08T16:05:20.152Z" }, + { url = "https://files.pythonhosted.org/packages/4b/fa/a7e5b95afd80d24313307f03624acc65801846fa75599034f8ceb9e2cbf6/scipy-1.15.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3b0334816afb8b91dab859281b1b9786934392aa3d527cd847e41bb6f45bee65", size = 22384499, upload-time = "2025-05-08T16:05:24.494Z" }, + { url = "https://files.pythonhosted.org/packages/17/99/f3aaddccf3588bb4aea70ba35328c204cadd89517a1612ecfda5b2dd9d7a/scipy-1.15.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:6db907c7368e3092e24919b5e31c76998b0ce1684d51a90943cb0ed1b4ffd6c1", size = 25152602, upload-time = "2025-05-08T16:05:29.313Z" }, + { url = "https://files.pythonhosted.org/packages/56/c5/1032cdb565f146109212153339f9cb8b993701e9fe56b1c97699eee12586/scipy-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:721d6b4ef5dc82ca8968c25b111e307083d7ca9091bc38163fb89243e85e3889", size = 35503415, upload-time = "2025-05-08T16:05:34.699Z" }, + { url = "https://files.pythonhosted.org/packages/bd/37/89f19c8c05505d0601ed5650156e50eb881ae3918786c8fd7262b4ee66d3/scipy-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39cb9c62e471b1bb3750066ecc3a3f3052b37751c7c3dfd0fd7e48900ed52982", size = 37652622, upload-time = "2025-05-08T16:05:40.762Z" }, + { url = "https://files.pythonhosted.org/packages/7e/31/be59513aa9695519b18e1851bb9e487de66f2d31f835201f1b42f5d4d475/scipy-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:795c46999bae845966368a3c013e0e00947932d68e235702b5c3f6ea799aa8c9", size = 37244796, upload-time = "2025-05-08T16:05:48.119Z" }, + { url = "https://files.pythonhosted.org/packages/10/c0/4f5f3eeccc235632aab79b27a74a9130c6c35df358129f7ac8b29f562ac7/scipy-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:18aaacb735ab38b38db42cb01f6b92a2d0d4b6aabefeb07f02849e47f8fb3594", size = 40047684, upload-time = "2025-05-08T16:05:54.22Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a7/0ddaf514ce8a8714f6ed243a2b391b41dbb65251affe21ee3077ec45ea9a/scipy-1.15.3-cp311-cp311-win_amd64.whl", hash = "sha256:ae48a786a28412d744c62fd7816a4118ef97e5be0bee968ce8f0a2fba7acf3bb", size = 41246504, upload-time = "2025-05-08T16:06:00.437Z" }, + { url = "https://files.pythonhosted.org/packages/37/4b/683aa044c4162e10ed7a7ea30527f2cbd92e6999c10a8ed8edb253836e9c/scipy-1.15.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6ac6310fdbfb7aa6612408bd2f07295bcbd3fda00d2d702178434751fe48e019", size = 38766735, upload-time = "2025-05-08T16:06:06.471Z" }, + { url = "https://files.pythonhosted.org/packages/7b/7e/f30be3d03de07f25dc0ec926d1681fed5c732d759ac8f51079708c79e680/scipy-1.15.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:185cd3d6d05ca4b44a8f1595af87f9c372bb6acf9c808e99aa3e9aa03bd98cf6", size = 30173284, upload-time = "2025-05-08T16:06:11.686Z" }, + { url = "https://files.pythonhosted.org/packages/07/9c/0ddb0d0abdabe0d181c1793db51f02cd59e4901da6f9f7848e1f96759f0d/scipy-1.15.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:05dc6abcd105e1a29f95eada46d4a3f251743cfd7d3ae8ddb4088047f24ea477", size = 22446958, upload-time = "2025-05-08T16:06:15.97Z" }, + { url = "https://files.pythonhosted.org/packages/af/43/0bce905a965f36c58ff80d8bea33f1f9351b05fad4beaad4eae34699b7a1/scipy-1.15.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:06efcba926324df1696931a57a176c80848ccd67ce6ad020c810736bfd58eb1c", size = 25242454, upload-time = "2025-05-08T16:06:20.394Z" }, + { url = "https://files.pythonhosted.org/packages/56/30/a6f08f84ee5b7b28b4c597aca4cbe545535c39fe911845a96414700b64ba/scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05045d8b9bfd807ee1b9f38761993297b10b245f012b11b13b91ba8945f7e45", size = 35210199, upload-time = "2025-05-08T16:06:26.159Z" }, + { url = "https://files.pythonhosted.org/packages/0b/1f/03f52c282437a168ee2c7c14a1a0d0781a9a4a8962d84ac05c06b4c5b555/scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271e3713e645149ea5ea3e97b57fdab61ce61333f97cfae392c28ba786f9bb49", size = 37309455, upload-time = "2025-05-08T16:06:32.778Z" }, + { url = "https://files.pythonhosted.org/packages/89/b1/fbb53137f42c4bf630b1ffdfc2151a62d1d1b903b249f030d2b1c0280af8/scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6cfd56fc1a8e53f6e89ba3a7a7251f7396412d655bca2aa5611c8ec9a6784a1e", size = 36885140, upload-time = "2025-05-08T16:06:39.249Z" }, + { url = "https://files.pythonhosted.org/packages/2e/2e/025e39e339f5090df1ff266d021892694dbb7e63568edcfe43f892fa381d/scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ff17c0bb1cb32952c09217d8d1eed9b53d1463e5f1dd6052c7857f83127d539", size = 39710549, upload-time = "2025-05-08T16:06:45.729Z" }, + { url = "https://files.pythonhosted.org/packages/e6/eb/3bf6ea8ab7f1503dca3a10df2e4b9c3f6b3316df07f6c0ded94b281c7101/scipy-1.15.3-cp312-cp312-win_amd64.whl", hash = "sha256:52092bc0472cfd17df49ff17e70624345efece4e1a12b23783a1ac59a1b728ed", size = 40966184, upload-time = "2025-05-08T16:06:52.623Z" }, + { url = "https://files.pythonhosted.org/packages/73/18/ec27848c9baae6e0d6573eda6e01a602e5649ee72c27c3a8aad673ebecfd/scipy-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c620736bcc334782e24d173c0fdbb7590a0a436d2fdf39310a8902505008759", size = 38728256, upload-time = "2025-05-08T16:06:58.696Z" }, + { url = "https://files.pythonhosted.org/packages/74/cd/1aef2184948728b4b6e21267d53b3339762c285a46a274ebb7863c9e4742/scipy-1.15.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:7e11270a000969409d37ed399585ee530b9ef6aa99d50c019de4cb01e8e54e62", size = 30109540, upload-time = "2025-05-08T16:07:04.209Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d8/59e452c0a255ec352bd0a833537a3bc1bfb679944c4938ab375b0a6b3a3e/scipy-1.15.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:8c9ed3ba2c8a2ce098163a9bdb26f891746d02136995df25227a20e71c396ebb", size = 22383115, upload-time = "2025-05-08T16:07:08.998Z" }, + { url = "https://files.pythonhosted.org/packages/08/f5/456f56bbbfccf696263b47095291040655e3cbaf05d063bdc7c7517f32ac/scipy-1.15.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0bdd905264c0c9cfa74a4772cdb2070171790381a5c4d312c973382fc6eaf730", size = 25163884, upload-time = "2025-05-08T16:07:14.091Z" }, + { url = "https://files.pythonhosted.org/packages/a2/66/a9618b6a435a0f0c0b8a6d0a2efb32d4ec5a85f023c2b79d39512040355b/scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79167bba085c31f38603e11a267d862957cbb3ce018d8b38f79ac043bc92d825", size = 35174018, upload-time = "2025-05-08T16:07:19.427Z" }, + { url = "https://files.pythonhosted.org/packages/b5/09/c5b6734a50ad4882432b6bb7c02baf757f5b2f256041da5df242e2d7e6b6/scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9deabd6d547aee2c9a81dee6cc96c6d7e9a9b1953f74850c179f91fdc729cb7", size = 37269716, upload-time = "2025-05-08T16:07:25.712Z" }, + { url = "https://files.pythonhosted.org/packages/77/0a/eac00ff741f23bcabd352731ed9b8995a0a60ef57f5fd788d611d43d69a1/scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dde4fc32993071ac0c7dd2d82569e544f0bdaff66269cb475e0f369adad13f11", size = 36872342, upload-time = "2025-05-08T16:07:31.468Z" }, + { url = "https://files.pythonhosted.org/packages/fe/54/4379be86dd74b6ad81551689107360d9a3e18f24d20767a2d5b9253a3f0a/scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f77f853d584e72e874d87357ad70f44b437331507d1c311457bed8ed2b956126", size = 39670869, upload-time = "2025-05-08T16:07:38.002Z" }, + { url = "https://files.pythonhosted.org/packages/87/2e/892ad2862ba54f084ffe8cc4a22667eaf9c2bcec6d2bff1d15713c6c0703/scipy-1.15.3-cp313-cp313-win_amd64.whl", hash = "sha256:b90ab29d0c37ec9bf55424c064312930ca5f4bde15ee8619ee44e69319aab163", size = 40988851, upload-time = "2025-05-08T16:08:33.671Z" }, + { url = "https://files.pythonhosted.org/packages/1b/e9/7a879c137f7e55b30d75d90ce3eb468197646bc7b443ac036ae3fe109055/scipy-1.15.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3ac07623267feb3ae308487c260ac684b32ea35fd81e12845039952f558047b8", size = 38863011, upload-time = "2025-05-08T16:07:44.039Z" }, + { url = "https://files.pythonhosted.org/packages/51/d1/226a806bbd69f62ce5ef5f3ffadc35286e9fbc802f606a07eb83bf2359de/scipy-1.15.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6487aa99c2a3d509a5227d9a5e889ff05830a06b2ce08ec30df6d79db5fcd5c5", size = 30266407, upload-time = "2025-05-08T16:07:49.891Z" }, + { url = "https://files.pythonhosted.org/packages/e5/9b/f32d1d6093ab9eeabbd839b0f7619c62e46cc4b7b6dbf05b6e615bbd4400/scipy-1.15.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:50f9e62461c95d933d5c5ef4a1f2ebf9a2b4e83b0db374cb3f1de104d935922e", size = 22540030, upload-time = "2025-05-08T16:07:54.121Z" }, + { url = "https://files.pythonhosted.org/packages/e7/29/c278f699b095c1a884f29fda126340fcc201461ee8bfea5c8bdb1c7c958b/scipy-1.15.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:14ed70039d182f411ffc74789a16df3835e05dc469b898233a245cdfd7f162cb", size = 25218709, upload-time = "2025-05-08T16:07:58.506Z" }, + { url = "https://files.pythonhosted.org/packages/24/18/9e5374b617aba742a990581373cd6b68a2945d65cc588482749ef2e64467/scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a769105537aa07a69468a0eefcd121be52006db61cdd8cac8a0e68980bbb723", size = 34809045, upload-time = "2025-05-08T16:08:03.929Z" }, + { url = "https://files.pythonhosted.org/packages/e1/fe/9c4361e7ba2927074360856db6135ef4904d505e9b3afbbcb073c4008328/scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db984639887e3dffb3928d118145ffe40eff2fa40cb241a306ec57c219ebbbb", size = 36703062, upload-time = "2025-05-08T16:08:09.558Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8e/038ccfe29d272b30086b25a4960f757f97122cb2ec42e62b460d02fe98e9/scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:40e54d5c7e7ebf1aa596c374c49fa3135f04648a0caabcb66c52884b943f02b4", size = 36393132, upload-time = "2025-05-08T16:08:15.34Z" }, + { url = "https://files.pythonhosted.org/packages/10/7e/5c12285452970be5bdbe8352c619250b97ebf7917d7a9a9e96b8a8140f17/scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5e721fed53187e71d0ccf382b6bf977644c533e506c4d33c3fb24de89f5c3ed5", size = 38979503, upload-time = "2025-05-08T16:08:21.513Z" }, + { url = "https://files.pythonhosted.org/packages/81/06/0a5e5349474e1cbc5757975b21bd4fad0e72ebf138c5592f191646154e06/scipy-1.15.3-cp313-cp313t-win_amd64.whl", hash = "sha256:76ad1fb5f8752eabf0fa02e4cc0336b4e8f021e2d5f061ed37d6d264db35e3ca", size = 40308097, upload-time = "2025-05-08T16:08:27.627Z" }, +] + +[[package]] +name = "scipy" +version = "1.16.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", +] +dependencies = [ + { name = "numpy", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4c/3b/546a6f0bfe791bbb7f8d591613454d15097e53f906308ec6f7c1ce588e8e/scipy-1.16.2.tar.gz", hash = "sha256:af029b153d243a80afb6eabe40b0a07f8e35c9adc269c019f364ad747f826a6b", size = 30580599, upload-time = "2025-09-11T17:48:08.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/ef/37ed4b213d64b48422df92560af7300e10fe30b5d665dd79932baebee0c6/scipy-1.16.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:6ab88ea43a57da1af33292ebd04b417e8e2eaf9d5aa05700be8d6e1b6501cd92", size = 36619956, upload-time = "2025-09-11T17:39:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/85/ab/5c2eba89b9416961a982346a4d6a647d78c91ec96ab94ed522b3b6baf444/scipy-1.16.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c95e96c7305c96ede73a7389f46ccd6c659c4da5ef1b2789466baeaed3622b6e", size = 28931117, upload-time = "2025-09-11T17:39:29.06Z" }, + { url = "https://files.pythonhosted.org/packages/80/d1/eed51ab64d227fe60229a2d57fb60ca5898cfa50ba27d4f573e9e5f0b430/scipy-1.16.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:87eb178db04ece7c698220d523c170125dbffebb7af0345e66c3554f6f60c173", size = 20921997, upload-time = "2025-09-11T17:39:34.892Z" }, + { url = "https://files.pythonhosted.org/packages/be/7c/33ea3e23bbadde96726edba6bf9111fb1969d14d9d477ffa202c67bec9da/scipy-1.16.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:4e409eac067dcee96a57fbcf424c13f428037827ec7ee3cb671ff525ca4fc34d", size = 23523374, upload-time = "2025-09-11T17:39:40.846Z" }, + { url = "https://files.pythonhosted.org/packages/96/0b/7399dc96e1e3f9a05e258c98d716196a34f528eef2ec55aad651ed136d03/scipy-1.16.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e574be127bb760f0dad24ff6e217c80213d153058372362ccb9555a10fc5e8d2", size = 33583702, upload-time = "2025-09-11T17:39:49.011Z" }, + { url = "https://files.pythonhosted.org/packages/1a/bc/a5c75095089b96ea72c1bd37a4497c24b581ec73db4ef58ebee142ad2d14/scipy-1.16.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f5db5ba6188d698ba7abab982ad6973265b74bb40a1efe1821b58c87f73892b9", size = 35883427, upload-time = "2025-09-11T17:39:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/ab/66/e25705ca3d2b87b97fe0a278a24b7f477b4023a926847935a1a71488a6a6/scipy-1.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec6e74c4e884104ae006d34110677bfe0098203a3fec2f3faf349f4cb05165e3", size = 36212940, upload-time = "2025-09-11T17:40:06.013Z" }, + { url = "https://files.pythonhosted.org/packages/d6/fd/0bb911585e12f3abdd603d721d83fc1c7492835e1401a0e6d498d7822b4b/scipy-1.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:912f46667d2d3834bc3d57361f854226475f695eb08c08a904aadb1c936b6a88", size = 38865092, upload-time = "2025-09-11T17:40:15.143Z" }, + { url = "https://files.pythonhosted.org/packages/d6/73/c449a7d56ba6e6f874183759f8483cde21f900a8be117d67ffbb670c2958/scipy-1.16.2-cp311-cp311-win_amd64.whl", hash = "sha256:91e9e8a37befa5a69e9cacbe0bcb79ae5afb4a0b130fd6db6ee6cc0d491695fa", size = 38687626, upload-time = "2025-09-11T17:40:24.041Z" }, + { url = "https://files.pythonhosted.org/packages/68/72/02f37316adf95307f5d9e579023c6899f89ff3a051fa079dbd6faafc48e5/scipy-1.16.2-cp311-cp311-win_arm64.whl", hash = "sha256:f3bf75a6dcecab62afde4d1f973f1692be013110cad5338007927db8da73249c", size = 25503506, upload-time = "2025-09-11T17:40:30.703Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8d/6396e00db1282279a4ddd507c5f5e11f606812b608ee58517ce8abbf883f/scipy-1.16.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:89d6c100fa5c48472047632e06f0876b3c4931aac1f4291afc81a3644316bb0d", size = 36646259, upload-time = "2025-09-11T17:40:39.329Z" }, + { url = "https://files.pythonhosted.org/packages/3b/93/ea9edd7e193fceb8eef149804491890bde73fb169c896b61aa3e2d1e4e77/scipy-1.16.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ca748936cd579d3f01928b30a17dc474550b01272d8046e3e1ee593f23620371", size = 28888976, upload-time = "2025-09-11T17:40:46.82Z" }, + { url = "https://files.pythonhosted.org/packages/91/4d/281fddc3d80fd738ba86fd3aed9202331180b01e2c78eaae0642f22f7e83/scipy-1.16.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:fac4f8ce2ddb40e2e3d0f7ec36d2a1e7f92559a2471e59aec37bd8d9de01fec0", size = 20879905, upload-time = "2025-09-11T17:40:52.545Z" }, + { url = "https://files.pythonhosted.org/packages/69/40/b33b74c84606fd301b2915f0062e45733c6ff5708d121dd0deaa8871e2d0/scipy-1.16.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:033570f1dcefd79547a88e18bccacff025c8c647a330381064f561d43b821232", size = 23553066, upload-time = "2025-09-11T17:40:59.014Z" }, + { url = "https://files.pythonhosted.org/packages/55/a7/22c739e2f21a42cc8f16bc76b47cff4ed54fbe0962832c589591c2abec34/scipy-1.16.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ea3421209bf00c8a5ef2227de496601087d8f638a2363ee09af059bd70976dc1", size = 33336407, upload-time = "2025-09-11T17:41:06.796Z" }, + { url = "https://files.pythonhosted.org/packages/53/11/a0160990b82999b45874dc60c0c183d3a3a969a563fffc476d5a9995c407/scipy-1.16.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f66bd07ba6f84cd4a380b41d1bf3c59ea488b590a2ff96744845163309ee8e2f", size = 35673281, upload-time = "2025-09-11T17:41:15.055Z" }, + { url = "https://files.pythonhosted.org/packages/96/53/7ef48a4cfcf243c3d0f1643f5887c81f29fdf76911c4e49331828e19fc0a/scipy-1.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e9feab931bd2aea4a23388c962df6468af3d808ddf2d40f94a81c5dc38f32ef", size = 36004222, upload-time = "2025-09-11T17:41:23.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/71a69e0afd460049d41c65c630c919c537815277dfea214031005f474d78/scipy-1.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03dfc75e52f72cf23ec2ced468645321407faad8f0fe7b1f5b49264adbc29cb1", size = 38664586, upload-time = "2025-09-11T17:41:31.021Z" }, + { url = "https://files.pythonhosted.org/packages/34/95/20e02ca66fb495a95fba0642fd48e0c390d0ece9b9b14c6e931a60a12dea/scipy-1.16.2-cp312-cp312-win_amd64.whl", hash = "sha256:0ce54e07bbb394b417457409a64fd015be623f36e330ac49306433ffe04bc97e", size = 38550641, upload-time = "2025-09-11T17:41:36.61Z" }, + { url = "https://files.pythonhosted.org/packages/92/ad/13646b9beb0a95528ca46d52b7babafbe115017814a611f2065ee4e61d20/scipy-1.16.2-cp312-cp312-win_arm64.whl", hash = "sha256:2a8ffaa4ac0df81a0b94577b18ee079f13fecdb924df3328fc44a7dc5ac46851", size = 25456070, upload-time = "2025-09-11T17:41:41.3Z" }, + { url = "https://files.pythonhosted.org/packages/c1/27/c5b52f1ee81727a9fc457f5ac1e9bf3d6eab311805ea615c83c27ba06400/scipy-1.16.2-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:84f7bf944b43e20b8a894f5fe593976926744f6c185bacfcbdfbb62736b5cc70", size = 36604856, upload-time = "2025-09-11T17:41:47.695Z" }, + { url = "https://files.pythonhosted.org/packages/32/a9/15c20d08e950b540184caa8ced675ba1128accb0e09c653780ba023a4110/scipy-1.16.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:5c39026d12edc826a1ef2ad35ad1e6d7f087f934bb868fc43fa3049c8b8508f9", size = 28864626, upload-time = "2025-09-11T17:41:52.642Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/ea36098df653cca26062a627c1a94b0de659e97127c8491e18713ca0e3b9/scipy-1.16.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e52729ffd45b68777c5319560014d6fd251294200625d9d70fd8626516fc49f5", size = 20855689, upload-time = "2025-09-11T17:41:57.886Z" }, + { url = "https://files.pythonhosted.org/packages/dc/6f/d0b53be55727f3e6d7c72687ec18ea6d0047cf95f1f77488b99a2bafaee1/scipy-1.16.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:024dd4a118cccec09ca3209b7e8e614931a6ffb804b2a601839499cb88bdf925", size = 23512151, upload-time = "2025-09-11T17:42:02.303Z" }, + { url = "https://files.pythonhosted.org/packages/11/85/bf7dab56e5c4b1d3d8eef92ca8ede788418ad38a7dc3ff50262f00808760/scipy-1.16.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7a5dc7ee9c33019973a470556081b0fd3c9f4c44019191039f9769183141a4d9", size = 33329824, upload-time = "2025-09-11T17:42:07.549Z" }, + { url = "https://files.pythonhosted.org/packages/da/6a/1a927b14ddc7714111ea51f4e568203b2bb6ed59bdd036d62127c1a360c8/scipy-1.16.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c2275ff105e508942f99d4e3bc56b6ef5e4b3c0af970386ca56b777608ce95b7", size = 35681881, upload-time = "2025-09-11T17:42:13.255Z" }, + { url = "https://files.pythonhosted.org/packages/c1/5f/331148ea5780b4fcc7007a4a6a6ee0a0c1507a796365cc642d4d226e1c3a/scipy-1.16.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:af80196eaa84f033e48444d2e0786ec47d328ba00c71e4299b602235ffef9acb", size = 36006219, upload-time = "2025-09-11T17:42:18.765Z" }, + { url = "https://files.pythonhosted.org/packages/46/3a/e991aa9d2aec723b4a8dcfbfc8365edec5d5e5f9f133888067f1cbb7dfc1/scipy-1.16.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9fb1eb735fe3d6ed1f89918224e3385fbf6f9e23757cacc35f9c78d3b712dd6e", size = 38682147, upload-time = "2025-09-11T17:42:25.177Z" }, + { url = "https://files.pythonhosted.org/packages/a1/57/0f38e396ad19e41b4c5db66130167eef8ee620a49bc7d0512e3bb67e0cab/scipy-1.16.2-cp313-cp313-win_amd64.whl", hash = "sha256:fda714cf45ba43c9d3bae8f2585c777f64e3f89a2e073b668b32ede412d8f52c", size = 38520766, upload-time = "2025-09-11T17:43:25.342Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a5/85d3e867b6822d331e26c862a91375bb7746a0b458db5effa093d34cdb89/scipy-1.16.2-cp313-cp313-win_arm64.whl", hash = "sha256:2f5350da923ccfd0b00e07c3e5cfb316c1c0d6c1d864c07a72d092e9f20db104", size = 25451169, upload-time = "2025-09-11T17:43:30.198Z" }, + { url = "https://files.pythonhosted.org/packages/09/d9/60679189bcebda55992d1a45498de6d080dcaf21ce0c8f24f888117e0c2d/scipy-1.16.2-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:53d8d2ee29b925344c13bda64ab51785f016b1b9617849dac10897f0701b20c1", size = 37012682, upload-time = "2025-09-11T17:42:30.677Z" }, + { url = "https://files.pythonhosted.org/packages/83/be/a99d13ee4d3b7887a96f8c71361b9659ba4ef34da0338f14891e102a127f/scipy-1.16.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:9e05e33657efb4c6a9d23bd8300101536abd99c85cca82da0bffff8d8764d08a", size = 29389926, upload-time = "2025-09-11T17:42:35.845Z" }, + { url = "https://files.pythonhosted.org/packages/bf/0a/130164a4881cec6ca8c00faf3b57926f28ed429cd6001a673f83c7c2a579/scipy-1.16.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:7fe65b36036357003b3ef9d37547abeefaa353b237e989c21027b8ed62b12d4f", size = 21381152, upload-time = "2025-09-11T17:42:40.07Z" }, + { url = "https://files.pythonhosted.org/packages/47/a6/503ffb0310ae77fba874e10cddfc4a1280bdcca1d13c3751b8c3c2996cf8/scipy-1.16.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6406d2ac6d40b861cccf57f49592f9779071655e9f75cd4f977fa0bdd09cb2e4", size = 23914410, upload-time = "2025-09-11T17:42:44.313Z" }, + { url = "https://files.pythonhosted.org/packages/fa/c7/1147774bcea50d00c02600aadaa919facbd8537997a62496270133536ed6/scipy-1.16.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ff4dc42bd321991fbf611c23fc35912d690f731c9914bf3af8f417e64aca0f21", size = 33481880, upload-time = "2025-09-11T17:42:49.325Z" }, + { url = "https://files.pythonhosted.org/packages/6a/74/99d5415e4c3e46b2586f30cdbecb95e101c7192628a484a40dd0d163811a/scipy-1.16.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:654324826654d4d9133e10675325708fb954bc84dae6e9ad0a52e75c6b1a01d7", size = 35791425, upload-time = "2025-09-11T17:42:54.711Z" }, + { url = "https://files.pythonhosted.org/packages/1b/ee/a6559de7c1cc710e938c0355d9d4fbcd732dac4d0d131959d1f3b63eb29c/scipy-1.16.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63870a84cd15c44e65220eaed2dac0e8f8b26bbb991456a033c1d9abfe8a94f8", size = 36178622, upload-time = "2025-09-11T17:43:00.375Z" }, + { url = "https://files.pythonhosted.org/packages/4e/7b/f127a5795d5ba8ece4e0dce7d4a9fb7cb9e4f4757137757d7a69ab7d4f1a/scipy-1.16.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:fa01f0f6a3050fa6a9771a95d5faccc8e2f5a92b4a2e5440a0fa7264a2398472", size = 38783985, upload-time = "2025-09-11T17:43:06.661Z" }, + { url = "https://files.pythonhosted.org/packages/3e/9f/bc81c1d1e033951eb5912cd3750cc005943afa3e65a725d2443a3b3c4347/scipy-1.16.2-cp313-cp313t-win_amd64.whl", hash = "sha256:116296e89fba96f76353a8579820c2512f6e55835d3fad7780fece04367de351", size = 38631367, upload-time = "2025-09-11T17:43:14.44Z" }, + { url = "https://files.pythonhosted.org/packages/d6/5e/2cc7555fd81d01814271412a1d59a289d25f8b63208a0a16c21069d55d3e/scipy-1.16.2-cp313-cp313t-win_arm64.whl", hash = "sha256:98e22834650be81d42982360382b43b17f7ba95e0e6993e2a4f5b9ad9283a94d", size = 25787992, upload-time = "2025-09-11T17:43:19.745Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ac/ad8951250516db71619f0bd3b2eb2448db04b720a003dd98619b78b692c0/scipy-1.16.2-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:567e77755019bb7461513c87f02bb73fb65b11f049aaaa8ca17cfaa5a5c45d77", size = 36595109, upload-time = "2025-09-11T17:43:35.713Z" }, + { url = "https://files.pythonhosted.org/packages/ff/f6/5779049ed119c5b503b0f3dc6d6f3f68eefc3a9190d4ad4c276f854f051b/scipy-1.16.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:17d9bb346194e8967296621208fcdfd39b55498ef7d2f376884d5ac47cec1a70", size = 28859110, upload-time = "2025-09-11T17:43:40.814Z" }, + { url = "https://files.pythonhosted.org/packages/82/09/9986e410ae38bf0a0c737ff8189ac81a93b8e42349aac009891c054403d7/scipy-1.16.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:0a17541827a9b78b777d33b623a6dcfe2ef4a25806204d08ead0768f4e529a88", size = 20850110, upload-time = "2025-09-11T17:43:44.981Z" }, + { url = "https://files.pythonhosted.org/packages/0d/ad/485cdef2d9215e2a7df6d61b81d2ac073dfacf6ae24b9ae87274c4e936ae/scipy-1.16.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:d7d4c6ba016ffc0f9568d012f5f1eb77ddd99412aea121e6fa8b4c3b7cbad91f", size = 23497014, upload-time = "2025-09-11T17:43:49.074Z" }, + { url = "https://files.pythonhosted.org/packages/a7/74/f6a852e5d581122b8f0f831f1d1e32fb8987776ed3658e95c377d308ed86/scipy-1.16.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9702c4c023227785c779cba2e1d6f7635dbb5b2e0936cdd3a4ecb98d78fd41eb", size = 33401155, upload-time = "2025-09-11T17:43:54.661Z" }, + { url = "https://files.pythonhosted.org/packages/d9/f5/61d243bbc7c6e5e4e13dde9887e84a5cbe9e0f75fd09843044af1590844e/scipy-1.16.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d1cdf0ac28948d225decdefcc45ad7dd91716c29ab56ef32f8e0d50657dffcc7", size = 35691174, upload-time = "2025-09-11T17:44:00.101Z" }, + { url = "https://files.pythonhosted.org/packages/03/99/59933956331f8cc57e406cdb7a483906c74706b156998f322913e789c7e1/scipy-1.16.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:70327d6aa572a17c2941cdfb20673f82e536e91850a2e4cb0c5b858b690e1548", size = 36070752, upload-time = "2025-09-11T17:44:05.619Z" }, + { url = "https://files.pythonhosted.org/packages/c6/7d/00f825cfb47ee19ef74ecf01244b43e95eae74e7e0ff796026ea7cd98456/scipy-1.16.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5221c0b2a4b58aa7c4ed0387d360fd90ee9086d383bb34d9f2789fafddc8a936", size = 38701010, upload-time = "2025-09-11T17:44:11.322Z" }, + { url = "https://files.pythonhosted.org/packages/e4/9f/b62587029980378304ba5a8563d376c96f40b1e133daacee76efdcae32de/scipy-1.16.2-cp314-cp314-win_amd64.whl", hash = "sha256:f5a85d7b2b708025af08f060a496dd261055b617d776fc05a1a1cc69e09fe9ff", size = 39360061, upload-time = "2025-09-11T17:45:09.814Z" }, + { url = "https://files.pythonhosted.org/packages/82/04/7a2f1609921352c7fbee0815811b5050582f67f19983096c4769867ca45f/scipy-1.16.2-cp314-cp314-win_arm64.whl", hash = "sha256:2cc73a33305b4b24556957d5857d6253ce1e2dcd67fa0ff46d87d1670b3e1e1d", size = 26126914, upload-time = "2025-09-11T17:45:14.73Z" }, + { url = "https://files.pythonhosted.org/packages/51/b9/60929ce350c16b221928725d2d1d7f86cf96b8bc07415547057d1196dc92/scipy-1.16.2-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:9ea2a3fed83065d77367775d689401a703d0f697420719ee10c0780bcab594d8", size = 37013193, upload-time = "2025-09-11T17:44:16.757Z" }, + { url = "https://files.pythonhosted.org/packages/2a/41/ed80e67782d4bc5fc85a966bc356c601afddd175856ba7c7bb6d9490607e/scipy-1.16.2-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:7280d926f11ca945c3ef92ba960fa924e1465f8d07ce3a9923080363390624c4", size = 29390172, upload-time = "2025-09-11T17:44:21.783Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a3/2f673ace4090452696ccded5f5f8efffb353b8f3628f823a110e0170b605/scipy-1.16.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:8afae1756f6a1fe04636407ef7dbece33d826a5d462b74f3d0eb82deabefd831", size = 21381326, upload-time = "2025-09-11T17:44:25.982Z" }, + { url = "https://files.pythonhosted.org/packages/42/bf/59df61c5d51395066c35836b78136accf506197617c8662e60ea209881e1/scipy-1.16.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:5c66511f29aa8d233388e7416a3f20d5cae7a2744d5cee2ecd38c081f4e861b3", size = 23915036, upload-time = "2025-09-11T17:44:30.527Z" }, + { url = "https://files.pythonhosted.org/packages/91/c3/edc7b300dc16847ad3672f1a6f3f7c5d13522b21b84b81c265f4f2760d4a/scipy-1.16.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:efe6305aeaa0e96b0ccca5ff647a43737d9a092064a3894e46c414db84bc54ac", size = 33484341, upload-time = "2025-09-11T17:44:35.981Z" }, + { url = "https://files.pythonhosted.org/packages/26/c7/24d1524e72f06ff141e8d04b833c20db3021020563272ccb1b83860082a9/scipy-1.16.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7f3a337d9ae06a1e8d655ee9d8ecb835ea5ddcdcbd8d23012afa055ab014f374", size = 35790840, upload-time = "2025-09-11T17:44:41.76Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b7/5aaad984eeedd56858dc33d75efa59e8ce798d918e1033ef62d2708f2c3d/scipy-1.16.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bab3605795d269067d8ce78a910220262711b753de8913d3deeaedb5dded3bb6", size = 36174716, upload-time = "2025-09-11T17:44:47.316Z" }, + { url = "https://files.pythonhosted.org/packages/fd/c2/e276a237acb09824822b0ada11b028ed4067fdc367a946730979feacb870/scipy-1.16.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b0348d8ddb55be2a844c518cd8cc8deeeb8aeba707cf834db5758fc89b476a2c", size = 38790088, upload-time = "2025-09-11T17:44:53.011Z" }, + { url = "https://files.pythonhosted.org/packages/c6/b4/5c18a766e8353015439f3780f5fc473f36f9762edc1a2e45da3ff5a31b21/scipy-1.16.2-cp314-cp314t-win_amd64.whl", hash = "sha256:26284797e38b8a75e14ea6631d29bda11e76ceaa6ddb6fdebbfe4c4d90faf2f9", size = 39457455, upload-time = "2025-09-11T17:44:58.899Z" }, + { url = "https://files.pythonhosted.org/packages/97/30/2f9a5243008f76dfc5dee9a53dfb939d9b31e16ce4bd4f2e628bfc5d89d2/scipy-1.16.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d2a4472c231328d4de38d5f1f68fdd6d28a615138f842580a8a321b5845cf779", size = 26448374, upload-time = "2025-09-11T17:45:03.45Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "sse-starlette" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = "2025-07-27T09:07:44.565Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" }, +] + +[[package]] +name = "starlette" +version = "0.48.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a7/a5/d6f429d43394057b67a6b5bbe6eae2f77a6bf7459d961fdb224bf206eee6/starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46", size = 2652949, upload-time = "2025-09-13T08:41:05.699Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/72/2db2f49247d0a18b4f1bb9a5a39a0162869acf235f3a96418363947b3d46/starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659", size = 73736, upload-time = "2025-09-13T08:41:03.869Z" }, +] + +[[package]] +name = "strata-mcp" +version = "1.0.2" +source = { editable = "." } +dependencies = [ + { name = "bm25s" }, + { name = "mcp" }, + { name = "platformdirs" }, + { name = "pystemmer" }, + { name = "starlette" }, + { name = "uvicorn" }, + { name = "watchgod" }, +] + +[package.optional-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, +] + +[package.dev-dependencies] +dev = [ + { name = "hatchling" }, +] + +[package.metadata] +requires-dist = [ + { name = "bm25s", specifier = ">=0.2.14" }, + { name = "mcp", specifier = ">=1.0.0" }, + { name = "platformdirs", specifier = ">=4.4.0" }, + { name = "pystemmer", specifier = ">=3.0.0" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.0.0" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.21.0" }, + { name = "starlette", specifier = ">=0.37.0" }, + { name = "uvicorn", specifier = ">=0.30.0" }, + { name = "watchgod", specifier = ">=0.7" }, +] +provides-extras = ["dev"] + +[package.metadata.requires-dev] +dev = [{ name = "hatchling", specifier = ">=1.27.0" }] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "trove-classifiers" +version = "2025.9.11.17" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/9a/778622bc06632529817c3c524c82749a112603ae2bbcf72ee3eb33a2c4f1/trove_classifiers-2025.9.11.17.tar.gz", hash = "sha256:931ca9841a5e9c9408bc2ae67b50d28acf85bef56219b56860876dd1f2d024dd", size = 16975, upload-time = "2025-09-11T17:07:50.97Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/85/a4ff8758c66f1fc32aa5e9a145908394bf9cf1c79ffd1113cfdeb77e74e4/trove_classifiers-2025.9.11.17-py3-none-any.whl", hash = "sha256:5d392f2d244deb1866556457d6f3516792124a23d1c3a463a2e8668a5d1c15dd", size = 14158, upload-time = "2025-09-11T17:07:49.886Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.35.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, +] + +[[package]] +name = "watchgod" +version = "0.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/95/ae723eb1450763e1b2b9e70488054099c661f3b65f0c41d86b868cd6eb16/watchgod-0.7.tar.gz", hash = "sha256:48140d62b0ebe9dd9cf8381337f06351e1f2e70b2203fa9c6eff4e572ca84f29", size = 13580, upload-time = "2021-01-30T16:34:40.323Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/57/35/9a8da3fb6681e6eba662b2d249eea58cebf575e392271efac3344c172c5f/watchgod-0.7-py3-none-any.whl", hash = "sha256:d6c1ea21df37847ac0537ca0d6c2f4cdf513562e95f77bb93abbcf05573407b7", size = 11661, upload-time = "2021-01-30T16:34:39.24Z" }, +] diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..22d0fe8d --- /dev/null +++ b/package-lock.json @@ -0,0 +1,6 @@ +{ + "name": "klavis", + "lockfileVersion": 3, + "requires": true, + "packages": {} +} diff --git a/static/fireworks-klavis.png b/static/fireworks-klavis.png new file mode 100644 index 00000000..9ae06ac2 Binary files /dev/null and b/static/fireworks-klavis.png differ diff --git a/static/togetherai-klavis.png b/static/togetherai-klavis.png new file mode 100644 index 00000000..ce1b818a Binary files /dev/null and b/static/togetherai-klavis.png differ